IlyasMoutawwakil (HF Staff) committed
Commit d45f36a · verified · 1 Parent(s): 0cb5655

Upload perf-df-bnb-1xA10.csv with huggingface_hub
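For context, a minimal sketch of the kind of upload this commit message describes, done with huggingface_hub; the repo id, repo type, token handling, and paths below are illustrative placeholders, not details taken from this commit:

# Hypothetical sketch: uploading the benchmark CSV to a Hub repo with huggingface_hub.
# repo_id and repo_type are placeholders/assumptions, not taken from this commit.
from huggingface_hub import HfApi

api = HfApi()  # picks up the token from `huggingface-cli login` or the HF_TOKEN env var
api.upload_file(
    path_or_fileobj="perf-df-bnb-1xA10.csv",   # local CSV produced by the benchmark run
    path_in_repo="perf-df-bnb-1xA10.csv",      # destination path inside the repo
    repo_id="<namespace>/<repo-name>",         # placeholder repo id
    repo_type="dataset",                       # assumption: the target is a dataset repo
    commit_message="Upload perf-df-bnb-1xA10.csv with huggingface_hub",
)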

Files changed (1)
  1. perf-df-bnb-1xA10.csv +40 -40
perf-df-bnb-1xA10.csv CHANGED
@@ -916,7 +916,7 @@ Traceback (most recent call last):
  hf_raise_for_status(response)
  File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/utils/_errors.py"", line 367, in hf_raise_for_status
  raise HfHubHTTPError(message, response=response) from e
- huggingface_hub.utils._errors.HfHubHTTPError: (Request ID: Root=1-6678c2f3-5a35d956727d7a26709820f9;6852d2a5-6888-4034-b24c-4b02a4951967)
 
  403 Forbidden: Please enable access to public gated repositories in your fine-grained token settings to view this repository..
  Cannot access content at: https://huggingface.co/google/recurrentgemma-2b/resolve/main/config.json.
@@ -1051,7 +1051,7 @@ Traceback (most recent call last):
  hf_raise_for_status(response)
  File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/utils/_errors.py"", line 352, in hf_raise_for_status
  raise RepositoryNotFoundError(message, response) from e
- huggingface_hub.utils._errors.RepositoryNotFoundError: 404 Client Error. (Request ID: Root=1-6678c31d-13dc42ae2c9ed5c556f7e68f;3bb455c5-c4e6-4f8b-896f-e90dd643bc3d)
 
  Repository Not Found for url: https://huggingface.co/google/recurrentgemma-7b/resolve/main/config.json.
  Please make sure you specified the correct `repo_id` and `repo_type`.
@@ -1112,7 +1112,7 @@ Traceback (most recent call last):
  hf_raise_for_status(response)
  File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/utils/_errors.py"", line 367, in hf_raise_for_status
  raise HfHubHTTPError(message, response=response) from e
- huggingface_hub.utils._errors.HfHubHTTPError: (Request ID: Root=1-6678c2a1-034493b14ac8db456166cb90;b3fa3dfc-57f1-4101-8b64-c64c04fc901c)
 
  403 Forbidden: Please enable access to public gated repositories in your fine-grained token settings to view this repository..
  Cannot access content at: https://huggingface.co/google/gemma-2b/resolve/main/config.json.
@@ -1260,7 +1260,7 @@ Traceback (most recent call last):
  hf_raise_for_status(response)
  File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/utils/_errors.py"", line 367, in hf_raise_for_status
  raise HfHubHTTPError(message, response=response) from e
- huggingface_hub.utils._errors.HfHubHTTPError: (Request ID: Root=1-6678c2ca-332a58592e9021913f3fa627;f8ec8c0b-d073-479e-8756-5797d9aa9264)
 
  403 Forbidden: Please enable access to public gated repositories in your fine-grained token settings to view this repository..
  Cannot access content at: https://huggingface.co/google/gemma-7b/resolve/main/config.json.
@@ -1754,7 +1754,7 @@ Traceback (most recent call last):
  hf_raise_for_status(response)
  File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/utils/_errors.py"", line 367, in hf_raise_for_status
  raise HfHubHTTPError(message, response=response) from e
- huggingface_hub.utils._errors.HfHubHTTPError: (Request ID: Root=1-6678c072-49de3d261fc4a4b6696dcf50;24003e7d-fc27-4c86-a567-bff9cb478e14)
 
  403 Forbidden: Please enable access to public gated repositories in your fine-grained token settings to view this repository..
  Cannot access content at: https://huggingface.co/databricks/dbrx-base/resolve/main/config.json.
@@ -3275,7 +3275,7 @@ Traceback (most recent call last):
  hf_raise_for_status(response)
  File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/utils/_errors.py"", line 367, in hf_raise_for_status
  raise HfHubHTTPError(message, response=response) from e
- huggingface_hub.utils._errors.HfHubHTTPError: (Request ID: Root=1-6678c2fb-6bf0f68710db868413413cbc;d1b96915-4f31-447d-906c-7f9354c3cbef)
 
  403 Forbidden: Please enable access to public gated repositories in your fine-grained token settings to view this repository..
  Cannot access content at: https://huggingface.co/google/recurrentgemma-2b/resolve/main/config.json.
@@ -3470,7 +3470,7 @@ Traceback (most recent call last):
  hf_raise_for_status(response)
  File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/utils/_errors.py"", line 352, in hf_raise_for_status
  raise RepositoryNotFoundError(message, response) from e
- huggingface_hub.utils._errors.RepositoryNotFoundError: 404 Client Error. (Request ID: Root=1-6678c324-6b7d97de5a65e7be47f84f36;79e649ca-a563-4f0e-841c-664142193c16)
 
  Repository Not Found for url: https://huggingface.co/google/recurrentgemma-7b/resolve/main/config.json.
  Please make sure you specified the correct `repo_id` and `repo_type`.
@@ -3531,7 +3531,7 @@ Traceback (most recent call last):
  hf_raise_for_status(response)
  File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/utils/_errors.py"", line 367, in hf_raise_for_status
  raise HfHubHTTPError(message, response=response) from e
- huggingface_hub.utils._errors.HfHubHTTPError: (Request ID: Root=1-6678c2a8-1e35d0c35d3799ad7e8aab8e;0c4ecf7a-c3d8-4513-a8f5-4b6483b72977)
 
  403 Forbidden: Please enable access to public gated repositories in your fine-grained token settings to view this repository..
  Cannot access content at: https://huggingface.co/google/gemma-2b/resolve/main/config.json.
@@ -3765,7 +3765,7 @@ Traceback (most recent call last):
  hf_raise_for_status(response)
  File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/utils/_errors.py"", line 367, in hf_raise_for_status
  raise HfHubHTTPError(message, response=response) from e
- huggingface_hub.utils._errors.HfHubHTTPError: (Request ID: Root=1-6678c2d1-4c0e879322f479df6c347d9e;718d8f61-19ab-4eb8-bac6-a4f461402c6a)
 
  403 Forbidden: Please enable access to public gated repositories in your fine-grained token settings to view this repository..
  Cannot access content at: https://huggingface.co/google/gemma-7b/resolve/main/config.json.
@@ -4610,7 +4610,7 @@ Traceback (most recent call last):
  hf_raise_for_status(response)
  File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/utils/_errors.py"", line 367, in hf_raise_for_status
  raise HfHubHTTPError(message, response=response) from e
- huggingface_hub.utils._errors.HfHubHTTPError: (Request ID: Root=1-6678c079-76381c3b4e5e94af0a3bbb92;ce4baefd-4295-4f13-afd3-82ff0ea84a71)
 
  403 Forbidden: Please enable access to public gated repositories in your fine-grained token settings to view this repository..
  Cannot access content at: https://huggingface.co/databricks/dbrx-base/resolve/main/config.json.
@@ -5032,7 +5032,7 @@ ChildProcessError: Traceback (most recent call last):
  cls._check_and_enable_flash_attn_2(
  File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 1560, in _check_and_enable_flash_attn_2
  raise ValueError(
- ValueError: XGLMForCausalLM does not support Flash Attention 2.0 yet. Please request to add support where the model is hosted, on its model hub page: https://huggingface.co//tmp/tmpc1ybipwi/no_weights_model/discussions/new or in the Transformers GitHub repo: https://github.com/huggingface/transformers/issues/new
 
  ",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,
  8bit-bnb-flash_attention_2,pytorch,2.3.1+cu121,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,tiiuae/falcon-7b,tiiuae/falcon-7b,cuda,0,42,,,True,True,True,True,,float16,True,False,,flash_attention_2,,False,,False,forward,bnb,0.0,True,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, AMD EPYC 7R32,16,66697.29792,Linux,x86_64,Linux-5.10.218-208.862.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['NVIDIA A10G'],1,24146608128,0.2.1,,4.41.2,,0.31.0,,,,1.20.0,,,,0.11.1,,"Traceback (most recent call last):
@@ -5693,7 +5693,7 @@ ChildProcessError: Traceback (most recent call last):
  cls._check_and_enable_flash_attn_2(
  File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 1560, in _check_and_enable_flash_attn_2
  raise ValueError(
- ValueError: CodeGenForCausalLM does not support Flash Attention 2.0 yet. Please request to add support where the model is hosted, on its model hub page: https://huggingface.co//tmp/tmp0m75hu5z/no_weights_model/discussions/new or in the Transformers GitHub repo: https://github.com/huggingface/transformers/issues/new
 
  ",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,
  8bit-bnb-flash_attention_2,pytorch,2.3.0+cu121,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,EleutherAI/pythia-2.7b,,cuda,0,42,,,,,True,True,,float16,True,False,,flash_attention_2,,False,,False,forward,bnb,0.0,True,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,,spawn, AMD EPYC 7R32,16,66697.29792,Linux,x86_64,Linux-5.10.215-203.850.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['NVIDIA A10G'],1,24146608128,0.2.0,217063f5c507ed7cc255df7e1f64c4333a0b4dfe,4.40.2,,0.30.1,,,,1.19.2,,,,0.10.0,,,MB,1753.206784,4370.989056,0.0,3716.153344,3502.931968,s,10,0.7158005905151366,0.07158005905151367,0.0026151097512319512,0.07056062698364257,0.07317087783813477,0.0761810390472412,0.07858916801452637,"[0.07919120025634765, 0.07085072326660156, 0.07094694519042968, 0.07031388854980469, 0.07023776245117187, 0.07029948425292969, 0.07049616241455078, 0.072501953125, 0.07033737945556641, 0.07062509155273437]",tokens/s,3576.4150434098656,kWh,8.388341447797885e-07,4.5958770674104095e-07,1.8877674676594226e-06,3.1861893191802524e-06,tokens/kWh,80346763.5959134,MB,1753.206784,4370.989056,0.0,3716.153344,3632.817664,s,10,44.667627929687505,4.46676279296875,0.018600767003160216,4.460619384765625,4.485654638671876,4.498310229492188,4.5084347021484374,"[4.48284228515625, 4.4655009765625, 4.45241162109375, 4.45341796875, 4.44769091796875, 4.4522490234375, 4.481310546875, 4.4602392578125, 4.46099951171875, 4.5109658203125]",tokens/s,14.104174078634749,kWh,5.310124745159473e-05,2.9102769588021433e-05,9.395025364714023e-05,0.0001761542706867564,tokens/kWh,357641.05947807967,,s,629,45.231303550720156,0.07190986256076345,0.008457534828436213,0.07048806762695313,0.07254323120117187,0.07291353302001953,0.14091268249511718,"[0.07149763488769531, 0.07236300659179687, 0.07048397064208985, 0.07265382385253906, 0.07177318572998047, 0.07225138854980469, 0.07384473419189454, 0.07193087768554687, 0.07062732696533203, 0.07043583679199218, 0.070434814453125, 0.07033241271972657, 0.07061817932128907, 0.07013779449462891, 0.07195852661132812, 0.07246438598632812, 0.07255142211914062, 0.07255961608886718, 0.07078912353515625, 0.07081983947753906, 0.07044915008544922, 0.07034265899658203, 0.07060787200927734, 0.07021568298339843, 0.07060275268554687, 0.07041433715820312, 0.07039282989501953, 0.07039385223388672, 0.07066316986083984, 0.07061299133300782, 0.07034162902832031, 0.07017164611816407, 0.07033654022216797, 0.07016553497314452, 0.0703917465209961, 0.07013990020751953, 0.07160626983642578, 0.07050240325927734, 0.07033548736572266, 0.07311052703857422, 0.07278694152832031, 0.07141990661621093, 0.07023411560058594, 0.07154585266113281, 0.07023104095458985, 0.07205171203613281, 0.07074508666992188, 0.07263231658935547, 0.07006617736816406, 0.07114649963378906, 0.07070105743408203, 0.07260057830810547, 0.07075430297851562, 0.07085670471191406, 0.07270502471923829, 0.0717649917602539, 0.070434814453125, 0.07098060607910156, 0.07048089599609375, 0.07204761505126953, 0.07027814483642578, 0.07079833221435547, 0.14196018981933595, 0.07093965148925781, 0.07223808288574218, 0.0704194564819336, 0.07073382568359375, 0.0712837142944336, 0.07221247863769531, 0.07236198425292968, 0.0719656982421875, 0.07015936279296875, 0.07168716430664063, 0.07038979339599609, 0.07106658935546875, 0.0719288330078125, 0.07191756439208985, 0.07084031677246094, 
0.07375462341308593, 0.07192269134521484, 0.07429017639160156, 0.07091712188720703, 0.07006105804443359, 0.0702371826171875, 0.07047277069091797, 0.0703354263305664, 0.07038566589355469, 0.07033856201171874, 0.07036927795410156, 0.07030681610107421, 0.07035903930664063, 0.07051980590820313, 0.0704532470703125, 0.07031910705566406, 0.07042253112792969, 0.0697681884765625, 0.07008972930908203, 0.07012351989746093, 0.06991155242919922, 0.07039590454101563, 0.0702730255126953, 0.0704368667602539, 0.0709191665649414, 0.07057107543945312, 0.07038355255126953, 0.06974566650390625, 0.07057408142089844, 0.07198207855224609, 0.07142912292480469, 0.07038668823242188, 0.07020134735107422, 0.07028018951416015, 0.0721817626953125, 0.07068876647949218, 0.0739788818359375, 0.07211827087402344, 0.07048703765869141, 0.07026073455810547, 0.07033856201171874, 0.06981843566894531, 0.07036307525634766, 0.07035903930664063, 0.07015936279296875, 0.0702525405883789, 0.07032115173339844, 0.14248550415039063, 0.0704532470703125, 0.07049625396728515, 0.07205888366699219, 0.07040105438232422, 0.07024227142333984, 0.0702730255126953, 0.0700549087524414, 0.07016038513183594, 0.07020751953125, 0.07015318298339844, 0.07036006164550782, 0.07208345794677734, 0.07120384216308594, 0.07030886077880859, 0.07037747192382812, 0.07025459289550781, 0.07048806762695313, 0.07146086120605469, 0.07031705474853515, 0.07024639892578124, 0.07025357055664062, 0.07015628814697265, 0.07031603240966797, 0.07018701171875, 0.07035289764404297, 0.07062220764160156, 0.07034982299804687, 0.07380480194091797, 0.07419801330566406, 0.07048499298095703, 0.07023104095458985, 0.07022489929199219, 0.07049625396728515, 0.07031910705566406, 0.06968934631347656, 0.07044915008544922, 0.07046144104003907, 0.07019007873535156, 0.0704931869506836, 0.07031504058837891, 0.07061500549316406, 0.07025459289550781, 0.06980095672607421, 0.07020543670654297, 0.07057920074462891, 0.07047270202636718, 0.07049215698242188, 0.07036313629150391, 0.0703969955444336, 0.07029548645019532, 0.07073689270019531, 0.07023206329345703, 0.07331123352050781, 0.07239373016357421, 0.07279411315917969, 0.07025049591064453, 0.07048601531982422, 0.07033139038085938, 0.07038771057128906, 0.07031398773193359, 0.07084646606445312, 0.07017683410644532, 0.1412003173828125, 0.07280127716064454, 0.07291295623779297, 0.07079011535644532, 0.07045833587646484, 0.06992281341552735, 0.07036006164550782, 0.07027712249755859, 0.07034368133544922, 0.070329345703125, 0.07038361358642578, 0.07045938873291016, 0.07043276977539062, 0.07054847717285156, 0.07040716552734375, 0.07084953308105468, 0.07044198608398437, 0.07054541015625, 0.07037337493896484, 0.07072358703613281, 0.07076454162597656, 0.07148953247070312, 0.07052082824707032, 0.0703795166015625, 0.0707430419921875, 0.07142809295654297, 0.07075020599365234, 0.07038259124755859, 0.07049215698242188, 0.07050035095214843, 0.07211110687255859, 0.07184690856933594, 0.07365119934082032, 0.0705771484375, 0.06972825622558594, 0.0703672332763672, 0.07038566589355469, 0.07066214752197265, 0.07053619384765625, 0.07096217346191407, 0.07095398712158203, 0.0704368667602539, 0.07037849426269531, 0.07100518035888671, 0.07086080169677735, 0.07033446502685547, 0.07027609252929687, 0.07033446502685547, 0.07015017700195313, 0.07036412811279297, 0.07049420928955077, 0.07031603240966797, 0.07026585388183594, 0.07072870635986328, 0.07130521392822266, 0.07034572601318359, 0.07048703765869141, 0.07038976287841797, 0.07045529937744141, 0.07031807708740234, 
0.07038566589355469, 0.07027097320556641, 0.07006105804443359, 0.14092185974121094, 0.0704901123046875, 0.07320371246337891, 0.07042150115966797, 0.07148339080810547, 0.07050240325927734, 0.06999040222167968, 0.07028530883789062, 0.0701317138671875, 0.07035596466064453, 0.07015526580810547, 0.07016550445556641, 0.07022592163085938, 0.07046144104003907, 0.07004672241210938, 0.07037337493896484, 0.07001395416259766, 0.07048703765869141, 0.07069593811035156, 0.0704676513671875, 0.07074195098876954, 0.0702485122680664, 0.0703180160522461, 0.07263129425048828, 0.07168307495117188, 0.07106047821044922, 0.07031193542480468, 0.0709939193725586, 0.07001087951660157, 0.0700231704711914, 0.06932275390625, 0.0694999008178711, 0.07037133026123046, 0.07136972808837891, 0.06993408203125, 0.06980403137207031, 0.07046963500976562, 0.07041843414306641, 0.07022796630859375, 0.07048397064208985, 0.07091302490234375, 0.07066422271728516, 0.07354160308837891, 0.07238349151611329, 0.07298252868652344, 0.07026687622070313, 0.07172300720214844, 0.07047782135009766, 0.07016754913330078, 0.07050137329101562, 0.07046348571777344, 0.0704194564819336, 0.07021363067626953, 0.07042662048339844, 0.07023513793945313, 0.07038361358642578, 0.07011328125, 0.07033245086669922, 0.07018902587890626, 0.07036927795410156, 0.06995763397216796, 0.07021363067626953, 0.07022182464599609, 0.14130278015136719, 0.07017369842529297, 0.07012454223632812, 0.07298764801025391, 0.07130521392822266, 0.07037849426269531, 0.070181884765625, 0.07114342498779297, 0.07037644958496093, 0.07040921783447265, 0.07054438018798828, 0.07054847717285156, 0.07034060668945312, 0.0706529312133789, 0.07021977233886718, 0.0706529312133789, 0.07132982635498047, 0.07049520111083984, 0.0703672332763672, 0.07049113464355469, 0.07051776123046875, 0.07055667114257813, 0.07032627105712891, 0.07019929504394531, 0.07023411560058594, 0.0705638427734375, 0.0705955810546875, 0.07053311920166015, 0.07051264190673828, 0.07064784240722656, 0.07040406036376953, 0.07043993377685547, 0.07029350280761719, 0.07095097351074219, 0.07063545227050781, 0.07042969512939454, 0.0700794906616211, 0.07004876708984376, 0.07055564880371094, 0.07064268493652344, 0.0701470718383789, 0.07053823852539062, 0.07053209686279296, 0.07260671997070313, 0.071804931640625, 0.07340850830078124, 0.07225446319580078, 0.07109529876708984, 0.07053926086425781, 0.0705269775390625, 0.07047885131835938, 0.07037849426269531, 0.07057817840576172, 0.07064985656738282, 0.07054950714111329, 0.07037337493896484, 0.0700426254272461, 0.07076044464111328, 0.07062937927246093, 0.07047270202636718, 0.07053215789794921, 0.07040300750732421, 0.07008665466308593, 0.1408890838623047, 0.0700579833984375, 0.07040306854248046, 0.07095603179931641, 0.07067135620117188, 0.07052902221679687, 0.07032217407226563, 0.07087411499023437, 0.07056179046630859, 0.07040306854248046, 0.07119155120849609, 0.07045222473144531, 0.07153561401367188, 0.07102361297607422, 0.07295692443847657, 0.07263641357421875, 0.07663104248046874, 0.0731514892578125, 0.07261190032958985, 0.07177107238769531, 0.07096627044677735, 0.07251455688476563, 0.07052288055419922, 0.07034982299804687, 0.07089356994628906, 0.07131443023681641, 0.07168000030517578, 0.07194931030273438, 0.07047577667236328, 0.07285247802734375, 0.07253094482421875, 0.07324364471435547, 0.07126322937011718, 0.0699504623413086, 0.0695050277709961, 0.06952652740478515, 0.0703641586303711, 0.07017676544189454, 0.06994944000244141, 0.06934425354003906, 0.0704686050415039, 0.0703969955444336, 
0.07023200225830079, 0.07009894561767578, 0.0696596450805664, 0.07089459228515625, 0.07034674835205078, 0.0717158432006836, 0.07167692565917969, 0.07193702697753906, 0.07162572479248047, 0.07148748779296875, 0.07270706939697266, 0.07029759979248047, 0.0700979232788086, 0.07177833557128906, 0.07124784088134765, 0.07156326293945313, 0.0718397445678711, 0.07128985595703125, 0.07057920074462891, 0.07054438018798828, 0.07034880065917969, 0.14348287963867187, 0.07049830627441406, 0.07024639892578124, 0.0706170883178711, 0.06970982360839843, 0.07032524871826172, 0.07015833282470703, 0.07006208038330078, 0.07034371185302735, 0.07042147064208984, 0.0702003173828125, 0.07272038269042969, 0.07259852600097656, 0.07238041687011719, 0.07269478607177735, 0.07254118347167969, 0.07344640350341797, 0.07283404541015626, 0.0721981430053711, 0.07240908813476563, 0.07258624267578125, 0.07021670532226562, 0.07021670532226562, 0.07076249694824219, 0.07023308563232422, 0.07016960144042969, 0.0699504623413086, 0.07017472076416016, 0.07204351806640626, 0.07038873291015625, 0.07032012939453125, 0.07037439727783203, 0.07026687622070313, 0.07033139038085938, 0.0702525405883789, 0.07038259124755859, 0.07035801696777344, 0.0702894058227539, 0.07015731048583984, 0.07010304260253906, 0.0701470718383789, 0.07023312377929687, 0.07039177703857422, 0.07029657745361328, 0.0702371826171875, 0.07127552032470703, 0.07026278686523438, 0.07011532592773438, 0.07021670532226562, 0.0706324462890625, 0.07047885131835938, 0.07055052947998047, 0.07046451568603515, 0.07022284698486328, 0.07026892852783204, 0.07008460998535156, 0.07233433532714843, 0.07216537475585938, 0.07277977752685547, 0.07074610900878907, 0.06977126312255859, 0.06972006225585937, 0.06992998504638671, 0.14085635375976563, 0.06998320007324219, 0.070329345703125, 0.07024127960205079, 0.07040921783447265, 0.06970572662353515, 0.07047065734863281, 0.07047577667236328, 0.07034880065917969, 0.07042047882080078, 0.07067340850830078, 0.07035801696777344, 0.07065087890625, 0.07035494232177734, 0.07040921783447265, 0.07037542724609375, 0.07243059539794922, 0.07194419097900391, 0.07237017822265625, 0.070329345703125, 0.07091404724121093, 0.07100927734375, 0.07039794921875, 0.07029964447021485, 0.07054438018798828, 0.07038976287841797, 0.07050137329101562, 0.07172505950927735, 0.073301025390625, 0.07376380920410157, 0.0728616943359375, 0.07057202911376953, 0.07004160308837891, 0.06957158660888672, 0.07051570892333985, 0.07044198608398437, 0.07060582733154297, 0.0704716796875, 0.07048806762695313, 0.07044403076171875, 0.07057510375976563, 0.07046041870117188, 0.07126732635498047, 0.07257907104492188, 0.07077581024169922, 0.07048601531982422, 0.07047987365722656, 0.0702730255126953, 0.06994739532470703, 0.07239577484130859, 0.07052902221679687, 0.07033036804199219, 0.07229440307617188, 0.07042969512939454, 0.07047782135009766, 0.07042156982421875, 0.07127855682373047, 0.07062627410888672, 0.07017164611816407, 0.0716042251586914, 0.07137177276611328, 0.07010201263427734, 0.07049420928955077, 0.14187315368652345, 0.07062322998046874, 0.07053721618652344, 0.07057612609863281, 0.07067750549316407, 0.070793212890625, 0.07038566589355469, 0.07079936218261719, 0.0702730255126953, 0.070614013671875, 0.07046553802490234, 0.0705116195678711, 0.07048397064208985, 0.07051673889160157, 0.0705423355102539, 0.07029043579101563, 0.07032319641113281, 0.0703078384399414, 0.07160012817382813, 0.07250431823730469, 0.07043788909912109, 0.07060384368896484, 0.07045728302001954, 0.07036825561523438, 
0.07142400360107422, 0.07265177917480468, 0.07246028900146484, 0.07253606414794922, 0.07204659271240234, 0.07295078277587891, 0.0729139175415039, 0.07195340728759765, 0.072416259765625, 0.07247154998779297, 0.07184690856933594, 0.07263231658935547, 0.07274086761474609, 0.0717096939086914, 0.07236402893066406, 0.07166976165771484, 0.07194931030273438, 0.07173426818847656, 0.07251148986816407, 0.07147315216064454, 0.07262515258789062, 0.0719482879638672, 0.07177523040771484, 0.07152435302734375, 0.0726824951171875, 0.07250125122070313, 0.07197081756591797, 0.07249203491210937, 0.07244595336914063, 0.07192063903808593, 0.07164313507080078, 0.07215821075439453, 0.07262723541259766, 0.071847900390625, 0.07213568115234376, 0.07099801635742188, 0.07239577484130859, 0.07256371307373047, 0.07234559631347656]",tokens/s,13.906298307203762,,,main,False,False,
@@ -5997,7 +5997,7 @@ Traceback (most recent call last):
  hf_raise_for_status(response)
  File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/utils/_errors.py"", line 367, in hf_raise_for_status
  raise HfHubHTTPError(message, response=response) from e
- huggingface_hub.utils._errors.HfHubHTTPError: (Request ID: Root=1-6678c310-11b715bc715e9d6b06083aaf;d8721678-b285-42e6-8c10-967c7bdd577d)
 
  403 Forbidden: Please enable access to public gated repositories in your fine-grained token settings to view this repository..
  Cannot access content at: https://huggingface.co/google/recurrentgemma-2b/resolve/main/config.json.
@@ -6118,7 +6118,7 @@ ChildProcessError: Traceback (most recent call last):
  cls._check_and_enable_flash_attn_2(
  File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 1560, in _check_and_enable_flash_attn_2
  raise ValueError(
- ValueError: CodeGenForCausalLM does not support Flash Attention 2.0 yet. Please request to add support where the model is hosted, on its model hub page: https://huggingface.co//tmp/tmpuri9n99l/no_weights_model/discussions/new or in the Transformers GitHub repo: https://github.com/huggingface/transformers/issues/new
 
  ",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,
  8bit-bnb-flash_attention_2,pytorch,2.3.1+cu121,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,google/recurrentgemma-7b,google/recurrentgemma-7b,cuda,0,42,,,True,True,True,True,,float16,True,False,,flash_attention_2,,False,,False,forward,bnb,0.0,True,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, AMD EPYC 7R32,16,66697.29792,Linux,x86_64,Linux-5.10.218-208.862.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['NVIDIA A10G'],1,24146608128,0.2.1,,4.41.2,,0.31.0,,,,1.20.0,,,,0.11.1,,"Traceback (most recent call last):
@@ -6160,7 +6160,7 @@ Traceback (most recent call last):
  hf_raise_for_status(response)
  File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/utils/_errors.py"", line 352, in hf_raise_for_status
  raise RepositoryNotFoundError(message, response) from e
- huggingface_hub.utils._errors.RepositoryNotFoundError: 404 Client Error. (Request ID: Root=1-6678c338-114df07f3dbb72f43a18a657;5caa08db-8c9b-4b3c-ab8a-228d59953305)
 
  Repository Not Found for url: https://huggingface.co/google/recurrentgemma-7b/resolve/main/config.json.
  Please make sure you specified the correct `repo_id` and `repo_type`.
@@ -6221,7 +6221,7 @@ Traceback (most recent call last):
  hf_raise_for_status(response)
  File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/utils/_errors.py"", line 367, in hf_raise_for_status
  raise HfHubHTTPError(message, response=response) from e
- huggingface_hub.utils._errors.HfHubHTTPError: (Request ID: Root=1-6678c2bc-1dc272d1096b2fdd0256d1e5;e2b16fe4-23d0-48e6-9593-a4f266b78035)
 
  403 Forbidden: Please enable access to public gated repositories in your fine-grained token settings to view this repository..
  Cannot access content at: https://huggingface.co/google/gemma-2b/resolve/main/config.json.
@@ -6369,7 +6369,7 @@ Traceback (most recent call last):
  hf_raise_for_status(response)
  File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/utils/_errors.py"", line 367, in hf_raise_for_status
  raise HfHubHTTPError(message, response=response) from e
- huggingface_hub.utils._errors.HfHubHTTPError: (Request ID: Root=1-6678c2e6-2ec256b4203e0c4e6b9b6560;9f653d13-cbda-4350-8ae9-b7d12bbb0bfa)
 
  403 Forbidden: Please enable access to public gated repositories in your fine-grained token settings to view this repository..
  Cannot access content at: https://huggingface.co/google/gemma-7b/resolve/main/config.json.
@@ -6627,7 +6627,7 @@ ChildProcessError: Traceback (most recent call last):
  cls._check_and_enable_flash_attn_2(
  File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 1560, in _check_and_enable_flash_attn_2
  raise ValueError(
- ValueError: XGLMForCausalLM does not support Flash Attention 2.0 yet. Please request to add support where the model is hosted, on its model hub page: https://huggingface.co//tmp/tmpg9fbapro/no_weights_model/discussions/new or in the Transformers GitHub repo: https://github.com/huggingface/transformers/issues/new
 
  ",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,
  8bit-bnb-flash_attention_2,pytorch,2.3.1+cu121,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,x,x,cuda,0,42,,,True,True,True,True,,float16,True,False,,flash_attention_2,,False,,False,forward,bnb,0.0,True,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, AMD EPYC 7R32,16,66697.29792,Linux,x86_64,Linux-5.10.218-208.862.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['NVIDIA A10G'],1,24146608128,0.2.1,,4.41.2,,0.31.0,,,,1.20.0,,,,0.11.1,,"Traceback (most recent call last):
@@ -6919,7 +6919,7 @@ Traceback (most recent call last):
  hf_raise_for_status(response)
  File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/utils/_errors.py"", line 367, in hf_raise_for_status
  raise HfHubHTTPError(message, response=response) from e
- huggingface_hub.utils._errors.HfHubHTTPError: (Request ID: Root=1-6678c08d-08b583ce21897eb1788067f3;45e6fb87-315e-4e45-9066-a7a71d4fccbe)
 
  403 Forbidden: Please enable access to public gated repositories in your fine-grained token settings to view this repository..
  Cannot access content at: https://huggingface.co/databricks/dbrx-base/resolve/main/config.json.
@@ -7125,7 +7125,7 @@ ChildProcessError: Traceback (most recent call last):
  cls._check_and_enable_flash_attn_2(
  File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 1560, in _check_and_enable_flash_attn_2
  raise ValueError(
- ValueError: XGLMForCausalLM does not support Flash Attention 2.0 yet. Please request to add support where the model is hosted, on its model hub page: https://huggingface.co//tmp/tmp_tulcjow/no_weights_model/discussions/new or in the Transformers GitHub repo: https://github.com/huggingface/transformers/issues/new
 
  ",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,
  8bit-bnb-flash_attention_2,pytorch,2.3.0+cu121,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,TencentARC/Mistral_Pro_8B_v0.1,,cuda,0,42,,,,,True,True,,float16,True,False,,flash_attention_2,,False,,False,forward,bnb,0.0,True,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,,spawn, AMD EPYC 7R32,16,66697.29792,Linux,x86_64,Linux-5.10.215-203.850.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['NVIDIA A10G'],1,24146608128,0.2.0,,4.40.2,,0.30.1,,,,1.19.2,,,,0.10.0,,,MB,5660.131328,10962.337792,0.0,10307.50208,10029.210624,s,10,1.4342690734863281,0.1434269073486328,0.0032239528002932865,0.14276050567626952,0.14646627197265624,0.14882899169921876,0.15071916748046876,"[0.15119171142578125, 0.14055494689941406, 0.14345762634277343, 0.14594122314453126, 0.14206338500976562, 0.1407220458984375, 0.14052482604980468, 0.14054188537597656, 0.1442948760986328, 0.14497654724121095]",tokens/s,1784.8812662308324,kWh,1.7037357295316364e-06,9.333370224703686e-07,4.414690039685912e-06,7.051762791687916e-06,tokens/kWh,36302979.490710236,MB,5660.131328,10962.337792,0.0,10307.50208,10056.90112,s,10,91.03206542968749,9.10320654296875,0.08025953626004148,9.07454150390625,9.2241923828125,9.259001464843749,9.28684873046875,"[9.293810546875, 9.0487578125, 9.21645703125, 9.09006640625, 9.034521484375, 9.0707138671875, 9.078369140625, 9.0585595703125, 9.098578125, 9.0422314453125]",tokens/s,6.920638316029503,kWh,0.00010834198840080747,5.937985369364469e-05,0.00020726067374491383,0.000374982515839366,tokens/kWh,168007.83326918574,,s,629,92.16478802490228,0.1465258951111325,0.017154203671883243,0.14422015380859374,0.14755471496582032,0.14824775695800782,0.2843094567871094,"[0.1534976043701172, 0.15138217163085937, 0.14815318298339844, 0.14723379516601562, 0.14589552307128906, 0.14708316040039063, 0.14719488525390625, 0.1486684112548828, 0.14777650451660157, 0.14928997802734376, 0.14852505493164062, 0.14738124084472656, 0.14825267028808595, 0.14617190551757814, 0.14766905212402343, 0.14603974914550782, 0.14723788452148437, 0.14720204162597655, 0.14758604431152345, 0.14771302795410157, 0.14740480041503906, 0.14766592407226561, 0.1476065216064453, 0.14713037109375, 0.14739353942871095, 0.14730035400390626, 0.1475030975341797, 0.1471815643310547, 0.1472184295654297, 0.14708837890625, 0.147631103515625, 0.14701676940917968, 0.1473258819580078, 0.14693174743652343, 0.14660508728027344, 0.147368896484375, 0.14727372741699218, 0.14718360900878907, 0.14627839660644532, 0.14710476684570312, 0.14769258117675782, 0.14700233459472656, 0.1472604217529297, 0.14715289306640625, 0.14720204162597655, 0.14623641967773438, 0.14750617980957031, 0.14715493774414062, 0.1470310363769531, 0.147125244140625, 0.1474846649169922, 0.14677708435058595, 0.1466603546142578, 0.14490419006347657, 0.14707200622558594, 0.14715187072753907, 0.14748672485351563, 0.14915379333496093, 0.14982861328125, 0.14755635070800782, 0.14706585693359375, 0.14647807312011718, 0.28261068725585936, 0.14193667602539062, 0.14197244262695313, 0.1418618927001953, 0.14242816162109376, 0.14205644226074218, 0.1417164764404297, 0.14202674865722656, 0.1422878723144531, 0.14224485778808593, 0.14182809448242187, 0.1425244140625, 0.144990234375, 0.14621385192871095, 0.14743449401855468, 0.14801100158691408, 0.14670028686523437, 0.14654258728027345, 0.14298725891113281, 0.14222029113769533, 
0.14218547058105468, 0.14629273986816407, 0.14658047485351564, 0.14634701538085937, 0.14226022338867186, 0.14339993286132813, 0.14181272888183594, 0.14232882690429688, 0.14208409118652343, 0.14561077880859374, 0.14698086547851563, 0.1468139190673828, 0.1429698486328125, 0.14215065002441407, 0.14218547058105468, 0.141549560546875, 0.14222950744628907, 0.14219468688964843, 0.14192127990722656, 0.1424701385498047, 0.14214451599121095, 0.14214247131347657, 0.142055419921875, 0.14200729370117188, 0.1421486053466797, 0.14182298278808594, 0.14230117797851563, 0.14190899658203124, 0.14127513122558594, 0.14705255126953126, 0.14671974182128905, 0.1458841552734375, 0.1467658233642578, 0.14730137634277343, 0.14554623413085938, 0.14709759521484375, 0.14683135986328125, 0.14590156555175782, 0.1421158447265625, 0.14215884399414064, 0.14210560607910155, 0.1419694061279297, 0.14311935424804687, 0.2923089904785156, 0.14832540893554688, 0.14229808044433595, 0.14185369873046874, 0.14219161987304688, 0.14211276245117188, 0.1411440887451172, 0.14231138610839844, 0.14515507507324218, 0.14639616394042967, 0.14236569213867187, 0.14185267639160157, 0.14324223327636718, 0.14543565368652345, 0.1462415313720703, 0.14700749206542968, 0.14749183654785156, 0.14913433837890624, 0.14708224487304689, 0.14785842895507811, 0.14836531066894532, 0.14659686279296874, 0.14642585754394533, 0.14595379638671874, 0.1468037109375, 0.14741299438476563, 0.1471866912841797, 0.14695018005371094, 0.14615753173828125, 0.14739865112304687, 0.14753074645996095, 0.14726451110839844, 0.14712832641601561, 0.14760858154296874, 0.14934323120117188, 0.14819839477539062, 0.14772531127929686, 0.14653030395507813, 0.14631321716308593, 0.1442170867919922, 0.1412689971923828, 0.14300569152832032, 0.1467852783203125, 0.144468994140625, 0.1477908477783203, 0.14747853088378907, 0.14710578918457032, 0.14732902526855468, 0.1479761962890625, 0.14744166564941405, 0.14737202453613282, 0.14715904235839844, 0.14731980895996094, 0.14735565185546876, 0.14734335327148437, 0.14759117126464844, 0.14715699768066406, 0.14793318176269532, 0.1475758056640625, 0.1477191619873047, 0.14755430603027345, 0.14633676147460936, 0.1478974151611328, 0.293482421875, 0.14644940185546876, 0.1467545623779297, 0.14730239868164063, 0.14824038696289063, 0.14709564208984374, 0.1482085418701172, 0.14197247314453126, 0.14193356323242187, 0.14270669555664062, 0.14446080017089843, 0.1439488067626953, 0.14200114440917969, 0.141955078125, 0.14212515258789063, 0.14191094970703125, 0.14166220092773438, 0.14212095642089845, 0.1453096923828125, 0.14467481994628906, 0.141955078125, 0.14581964111328125, 0.1451612091064453, 0.14201548767089844, 0.14746214294433593, 0.14255410766601562, 0.14240460205078126, 0.1456005096435547, 0.14647500610351563, 0.14677606201171875, 0.14548991394042968, 0.1466112060546875, 0.14662661743164063, 0.14529632568359374, 0.14660301208496093, 0.14532095336914064, 0.14720204162597655, 0.14515609741210939, 0.14182818603515626, 0.14359234619140626, 0.14211993408203125, 0.14597222900390625, 0.14219981384277344, 0.14482534790039062, 0.1466050567626953, 0.1439139862060547, 0.14232984924316405, 0.14218138122558593, 0.14511410522460938, 0.1470054473876953, 0.14371839904785155, 0.14676480102539063, 0.14239027404785157, 0.1433210906982422, 0.14554112243652345, 0.14293913269042968, 0.14217727661132812, 0.1420052490234375, 0.1420738525390625, 0.14203392028808592, 0.1424547882080078, 0.14198886108398437, 0.1421096954345703, 0.28633599853515623, 0.1421793212890625, 0.1419683837890625, 
0.14203494262695313, 0.14164070129394532, 0.142129150390625, 0.1420369873046875, 0.1409792022705078, 0.14163763427734374, 0.14192434692382813, 0.14748876953125, 0.1480816650390625, 0.14249778747558595, 0.14207693481445313, 0.1418946533203125, 0.14191513061523436, 0.14302822875976562, 0.1462671356201172, 0.14379315185546876, 0.1420042266845703, 0.14189056396484376, 0.142166015625, 0.14207283020019532, 0.14194586181640625, 0.1419632568359375, 0.1417574462890625, 0.14206259155273437, 0.14469427490234374, 0.14457958984375, 0.14223770141601563, 0.14819123840332032, 0.14243942260742187, 0.14197760009765625, 0.14243327331542968, 0.14391500854492187, 0.14455091857910157, 0.1463655090332031, 0.1469173126220703, 0.14727372741699218, 0.14673715209960939, 0.14708428955078126, 0.14422015380859374, 0.14662864685058594, 0.14663778686523438, 0.14674227905273438, 0.14578790283203125, 0.14431129455566405, 0.14236671447753907, 0.14213632202148438, 0.14202879333496093, 0.14202983093261717, 0.14231961059570314, 0.144500732421875, 0.1421271057128906, 0.1421967315673828, 0.14232269287109375, 0.1416243133544922, 0.14191513061523436, 0.14222438049316405, 0.14256434631347656, 0.14218240356445314, 0.14231039428710937, 0.14231858825683594, 0.28481637573242186, 0.14673817443847656, 0.1422530517578125, 0.1425244140625, 0.1424435272216797, 0.14243942260742187, 0.141549560546875, 0.14195712280273437, 0.14179942321777345, 0.14219264221191405, 0.14188543701171874, 0.14629682922363282, 0.14243536376953125, 0.14196937561035156, 0.1419141082763672, 0.14319308471679687, 0.14707200622558594, 0.14835813903808595, 0.146914306640625, 0.14225202941894532, 0.1429432373046875, 0.1466439666748047, 0.1463582763671875, 0.14676173400878906, 0.142097412109375, 0.14210456848144531, 0.142328857421875, 0.14207894897460938, 0.14204415893554687, 0.14187519836425783, 0.14248448181152343, 0.1420748748779297, 0.14453657531738281, 0.1497241668701172, 0.14483045959472657, 0.14850559997558593, 0.1470443572998047, 0.1467535400390625, 0.14210560607910155, 0.14510182189941406, 0.1472430114746094, 0.14219366455078125, 0.14655795288085938, 0.14226329040527344, 0.14511616516113282, 0.1430425567626953, 0.14588108825683593, 0.14455807495117187, 0.14212095642089845, 0.14284185791015624, 0.1465180206298828, 0.14465434265136717, 0.14633779907226563, 0.14715391540527345, 0.14545919799804688, 0.14233804321289062, 0.14329347229003905, 0.14232981872558595, 0.14286131286621093, 0.14224076843261718, 0.14246502685546875, 0.14232473754882813, 0.14228172302246095, 0.28571749877929686, 0.14685081481933593, 0.1466060791015625, 0.14657945251464843, 0.14648013305664062, 0.1426606140136719, 0.142166015625, 0.1418383331298828, 0.14211891174316407, 0.14176870727539062, 0.1424547882080078, 0.14432870483398438, 0.1421844482421875, 0.14212300109863282, 0.14295245361328124, 0.14228480529785156, 0.14206668090820312, 0.14567730712890625, 0.14654360961914062, 0.1468538818359375, 0.14368153381347656, 0.14674943542480468, 0.14677920532226563, 0.14910252380371095, 0.1466337890625, 0.14349510192871093, 0.1429053497314453, 0.1412884521484375, 0.14192025756835938, 0.14216397094726563, 0.1419171905517578, 0.14165298461914064, 0.14216294860839843, 0.14221107482910156, 0.1421271057128906, 0.14187930297851561, 0.14223359680175782, 0.14176153564453126, 0.14771302795410157, 0.1455615997314453, 0.1417820129394531, 0.1478830108642578, 0.146044921875, 0.14544895935058594, 0.14200320434570313, 0.1432238006591797, 0.14634495544433593, 0.14659584045410157, 0.14687437438964843, 0.1460561981201172, 
0.14661734008789062, 0.14204722595214844, 0.1441239013671875, 0.14674227905273438, 0.14656819152832032, 0.1423207092285156, 0.14195193481445312, 0.1423093719482422, 0.14223155212402344, 0.14204928588867188, 0.14666957092285157, 0.14673715209960939, 0.1421107177734375, 0.2830059509277344, 0.14187110900878908, 0.14194586181640625, 0.1410232391357422, 0.1422387237548828, 0.142055419921875, 0.14226739501953126, 0.14214656066894532, 0.14211482238769532, 0.14605413818359375, 0.1423134765625, 0.14269541931152344, 0.14528614807128906, 0.14245785522460938, 0.14232167053222655, 0.1421558074951172, 0.14252642822265624, 0.14217010498046875, 0.1435924530029297, 0.14213938903808593, 0.14155264282226562, 0.14155264282226562, 0.14195097351074218, 0.14192947387695312, 0.14090956115722655, 0.14435122680664061, 0.1428019561767578, 0.14582473754882813, 0.14670745849609376, 0.14971084594726564, 0.14662042236328124, 0.14527590942382812, 0.14401536560058595, 0.14318284606933593, 0.14638182067871094, 0.14626200866699218, 0.14467277526855468, 0.1420185546875, 0.14180870056152345, 0.14723167419433594, 0.14519500732421875, 0.14663987731933595, 0.1462353973388672, 0.14677606201171875, 0.14902784729003907, 0.1424701385498047, 0.1466265869140625, 0.1469142761230469, 0.144500732421875, 0.14280601501464843, 0.1422776336669922, 0.14262168884277343, 0.14193458557128907, 0.1422878723144531, 0.1456906280517578, 0.14675865173339844, 0.14654360961914062, 0.14573875427246094, 0.14238514709472655, 0.14148812866210939, 0.14229196166992186, 0.1426042938232422, 0.14217625427246094, 0.2911856689453125, 0.14181683349609375, 0.14218751525878906, 0.14203904724121094, 0.14148403930664064, 0.14213529968261718, 0.14200729370117188, 0.14192250061035155, 0.14223033142089844, 0.14233088684082032, 0.1445386199951172, 0.14171449279785156, 0.1443634490966797, 0.1467105255126953, 0.14244557189941406, 0.143383544921875, 0.1467310028076172, 0.1423687744140625, 0.1447403564453125, 0.14611251831054686, 0.1460643768310547, 0.14269541931152344, 0.14153523254394532, 0.1422387237548828, 0.14567526245117188, 0.14662451171875, 0.14528204345703125, 0.14394776916503907, 0.14590975952148438, 0.1455422058105469, 0.14265849304199218, 0.1465630645751953, 0.15092428588867188, 0.14452120971679688, 0.147051513671875, 0.14275379943847658, 0.14206771850585936, 0.14204620361328124, 0.14201449584960937, 0.14219261169433595, 0.14223974609375, 0.14218853759765626, 0.142202880859375, 0.1419990997314453, 0.14330368041992186, 0.14642994689941408, 0.1418434600830078, 0.1423319091796875, 0.1424486389160156, 0.1481861114501953, 0.145069091796875, 0.14216802978515625, 0.1488527374267578, 0.14834072875976562, 0.14806016540527345, 0.1470699462890625, 0.14825881958007814, 0.1478973388671875, 0.1470586853027344, 0.1465149383544922, 0.1468651580810547, 0.14660096740722656, 0.14620057678222656, 0.28747674560546876, 0.14185987854003906, 0.14227145385742188, 0.14205952453613283, 0.14222848510742186, 0.14239846801757813, 0.14227865600585937, 0.14170317077636718, 0.14202879333496093, 0.14202163696289063, 0.14184652709960938, 0.14195814514160157, 0.14193463134765624, 0.1420113983154297, 0.14294326782226563, 0.14217619323730468, 0.14186393737792968, 0.14214041137695313, 0.1420779571533203, 0.14214247131347657, 0.14200831604003905, 0.14226943969726563, 0.14271385192871094, 0.14213020324707032, 0.14204412841796876, 0.14196633911132814, 0.141591552734375, 0.1420789794921875, 0.14242713928222656, 0.14199501037597656, 0.1419878387451172, 0.1420779571533203, 0.14217625427246094, 
0.14183628845214843, 0.14210867309570313, 0.1430630340576172, 0.1449400329589844, 0.14324327087402344, 0.1468098602294922, 0.14377165222167967, 0.1458401336669922, 0.14713446044921874, 0.14582681274414064, 0.14665216064453124, 0.1467658233642578, 0.14667059326171875, 0.14654669189453126, 0.14684979248046875, 0.14678425598144532, 0.14661325073242187, 0.14607052612304688, 0.14693376159667967, 0.1446860809326172, 0.14237184143066406, 0.1426483154296875, 0.14714572143554688, 0.14626815795898437, 0.14899200439453125, 0.14655795288085938, 0.14257254028320313, 0.14196223449707032, 0.14228688049316407, 0.14241789245605468]",tokens/s,6.824732237544431,,,main,False,False,
@@ -7189,7 +7189,7 @@ ChildProcessError: Traceback (most recent call last):
  cls._check_and_enable_flash_attn_2(
  File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 1560, in _check_and_enable_flash_attn_2
  raise ValueError(
- ValueError: XGLMForCausalLM does not support Flash Attention 2.0 yet. Please request to add support where the model is hosted, on its model hub page: https://huggingface.co//tmp/tmpxxyfrefy/no_weights_model/discussions/new or in the Transformers GitHub repo: https://github.com/huggingface/transformers/issues/new
 
  ",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,True
  4bit-bnb-flash_attention_2,pytorch,2.3.1+cu121,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,tiiuae/falcon-7b,tiiuae/falcon-7b,cuda,0,42,,,True,True,True,True,,float16,True,False,,flash_attention_2,,False,,False,forward,bnb,0.0,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, AMD EPYC 7R32,16,66697.29792,Linux,x86_64,Linux-5.10.218-208.862.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['NVIDIA A10G'],1,24146608128,0.2.1,,4.41.2,,0.31.0,,,,1.20.0,,,,0.11.1,,"Traceback (most recent call last):
@@ -7970,7 +7970,7 @@ ChildProcessError: Traceback (most recent call last):
  cls._check_and_enable_flash_attn_2(
  File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 1560, in _check_and_enable_flash_attn_2
  raise ValueError(
- ValueError: CodeGenForCausalLM does not support Flash Attention 2.0 yet. Please request to add support where the model is hosted, on its model hub page: https://huggingface.co//tmp/tmp66v3913s/no_weights_model/discussions/new or in the Transformers GitHub repo: https://github.com/huggingface/transformers/issues/new
 
  ",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,True
  4bit-bnb-flash_attention_2,pytorch,2.3.0+cu121,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,EleutherAI/pythia-2.7b,,cuda,0,42,,,,,True,True,,float16,True,False,,flash_attention_2,,False,,False,forward,bnb,0.0,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,,spawn, AMD EPYC 7R32,16,66697.29792,Linux,x86_64,Linux-5.10.215-203.850.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['NVIDIA A10G'],1,24146608128,0.2.0,217063f5c507ed7cc255df7e1f64c4333a0b4dfe,4.40.2,,0.30.1,,,,1.19.2,,,,0.10.0,,,MB,1278.603264,3276.275712,0.0,2629.828608,2358.099968,s,10,1.363180648803711,0.1363180648803711,0.00026217317487357416,0.1361774597167969,0.13673182220458985,0.1368010154724121,0.13685637008666993,"[0.13687020874023437, 0.1363632354736328, 0.13641459655761717, 0.13614486694335937, 0.1360360565185547, 0.1361745300292969, 0.13614434814453125, 0.13671644592285156, 0.1361803894042969, 0.13613597106933595]",tokens/s,1877.9609307442738,kWh,1.6118631997444966e-06,8.832266388659214e-07,9.71787188840562e-06,1.2212961727016039e-05,tokens/kWh,20961336.465478946,MB,1278.603264,3276.275712,0.0,2629.828608,2358.102528,s,10,29.777363037109374,2.9777363037109374,0.02588726324207309,2.9713291015625,3.018982763671875,3.0193787109375,3.0196954687499997,"[2.9692578125, 2.954369873046875, 2.95029638671875, 2.94651025390625, 2.959674560546875, 3.018894775390625, 2.99993896484375, 2.973400390625, 3.019774658203125, 2.985245361328125]",tokens/s,21.157011089762264,kWh,3.511612323535098e-05,1.924535570923163e-05,7.12104706319921e-05,0.00012557194957657467,tokens/kWh,501704.4030329572,,s,629,30.95603612136841,0.04921468381775582,0.016312179544641713,0.046707710266113284,0.04863692855834961,0.04895887298583985,0.1833139434814453,"[0.04888371276855469, 0.04917145538330078, 0.048249855041503906, 0.04791910552978516, 0.04700057601928711, 0.04656435012817383, 0.04804095840454101, 0.048145408630371096, 0.05266739273071289, 0.052951038360595705, 0.04698521423339844, 0.04662681579589844, 0.04637593460083008, 0.04695449447631836, 0.046437374114990236, 0.04646912002563477, 0.046312446594238284, 0.046415870666503906, 0.046268417358398435, 0.04664115142822266, 0.04859187316894531, 0.048347137451171876, 0.048274433135986325, 0.048266239166259765, 0.04822531127929688, 0.046888927459716796, 0.04660531234741211, 0.046668800354003906, 0.0465428466796875, 0.04667289733886719, 0.04662374496459961, 0.046584831237792966, 0.046617633819580076, 0.04658787155151367, 0.04661043167114258, 0.0464793586730957, 0.04622335815429687, 0.046317569732666014, 0.04641593551635742, 0.046725055694580075, 0.04673023986816406, 0.04653977584838867, 0.046630912780761716, 0.046773246765136715, 0.04663603210449219, 0.046601215362548826, 0.04661043167114258, 0.04664422225952149, 0.0466063346862793, 0.04671692657470703, 0.04665651321411133, 0.04674764633178711, 0.046693374633789066, 0.04663808059692383, 0.04658892822265625, 0.0465797119140625, 0.04658278274536133, 0.046614528656005856, 0.04653158569335938, 0.0465797119140625, 0.04661964797973633, 0.046682113647460936, 0.18365440368652344, 0.046698497772216796, 0.046683135986328124, 0.04674764633178711, 0.04665350341796875, 0.046623680114746095, 0.04651724624633789, 0.04652339172363281, 0.0466165771484375, 0.046535678863525394, 0.04666572952270508, 0.046543872833251954, 0.04715827178955078, 0.049702945709228515, 0.04864508819580078, 
0.04698214340209961, 0.04639231872558594, 0.04759756851196289, 0.04860313415527344, 0.04817203140258789, 0.04827340698242188, 0.04671078491210937, 0.04645171356201172, 0.04654489517211914, 0.04678246307373047, 0.046707710266113284, 0.04657766342163086, 0.04651827239990235, 0.04720640182495117, 0.04652032089233398, 0.04651007843017578, 0.04648038482666016, 0.04660019302368164, 0.04666572952270508, 0.04597760009765625, 0.045873153686523435, 0.046465023040771485, 0.04595916748046875, 0.04644659042358398, 0.04680294418334961, 0.0465428466796875, 0.04660639953613281, 0.04682745742797852, 0.04664115142822266, 0.046595073699951174, 0.04654694366455078, 0.04647731018066406, 0.04648857498168945, 0.046524417877197265, 0.046465023040771485, 0.049186817169189455, 0.04894003295898437, 0.047738910675048825, 0.04649980926513672, 0.04653366470336914, 0.046507999420166014, 0.048075775146484374, 0.04660326385498047, 0.04653158569335938, 0.04651827239990235, 0.04651212692260742, 0.046837760925292966, 0.04677119827270508, 0.1835816955566406, 0.04658380889892578, 0.047800319671630856, 0.046117889404296876, 0.046486526489257815, 0.046491649627685545, 0.046429183959960936, 0.04612607955932617, 0.046516223907470705, 0.04642201614379883, 0.0464640007019043, 0.04640972900390625, 0.047303680419921876, 0.04697292709350586, 0.046301185607910154, 0.04644454574584961, 0.04625612640380859, 0.046429183959960936, 0.046516223907470705, 0.04658790588378906, 0.046541854858398436, 0.046540767669677734, 0.04672512054443359, 0.0474337272644043, 0.0465428466796875, 0.04739686584472656, 0.0466165771484375, 0.0465797119140625, 0.046473217010498044, 0.046413822174072264, 0.04655001449584961, 0.04657766342163086, 0.046514175415039063, 0.04658796691894531, 0.04752275085449219, 0.04734975814819336, 0.04658892822265625, 0.04655411148071289, 0.04851200103759765, 0.04827852630615234, 0.0474224624633789, 0.04655001449584961, 0.046489601135253904, 0.046475265502929686, 0.048115711212158206, 0.04731289672851562, 0.046516223907470705, 0.04650188827514649, 0.049426433563232425, 0.04711423873901367, 0.04655104064941406, 0.046489601135253904, 0.04655615997314453, 0.04638924789428711, 0.046486526489257815, 0.04640563201904297, 0.04651212692260742, 0.04775321578979492, 0.04838092803955078, 0.04723814392089844, 0.04639539337158203, 0.04654694366455078, 0.04658790588378906, 0.18326527404785156, 0.04662681579589844, 0.04649369430541992, 0.046584831237792966, 0.04655513763427734, 0.04655923080444336, 0.04653875350952148, 0.04658995056152344, 0.04652851104736328, 0.04656639862060547, 0.04646297454833984, 0.04653363037109375, 0.04658995056152344, 0.046565376281738284, 0.0465530891418457, 0.04670361709594727, 0.046663681030273435, 0.04671897506713867, 0.04660019302368164, 0.04658073425292969, 0.046650367736816405, 0.04658687973022461, 0.04647731018066406, 0.046604286193847655, 0.04630326461791992, 0.046703582763671876, 0.05004185485839844, 0.046838783264160154, 0.04659609603881836, 0.04656435012817383, 0.046630912780761716, 0.046698558807373045, 0.04662172698974609, 0.04658371353149414, 0.0465797119140625, 0.04662579345703125, 0.047288318634033204, 0.048198654174804685, 0.04846694564819336, 0.04801331329345703, 0.04657664108276367, 0.046884864807128904, 0.048734302520751956, 0.046059425354003904, 0.04659616088867188, 0.04658988952636719, 0.04660636901855469, 0.04648137664794922, 0.04628275299072265, 0.046652416229248046, 0.046268417358398435, 0.046714881896972656, 0.04660838317871094, 0.04664934539794922, 0.04652544021606445, 0.04658585739135742, 
0.04658995056152344, 0.04665651321411133, 0.0466063346862793, 0.046688255310058595, 0.046693374633789066, 0.046680065155029295, 0.04660224151611328, 0.18300210571289063, 0.04653670501708984, 0.046473217010498044, 0.0461578254699707, 0.046496768951416016, 0.04647423934936523, 0.04640256118774414, 0.04640665435791016, 0.04638003158569336, 0.046358528137207033, 0.04634316635131836, 0.046322689056396485, 0.04616806411743164, 0.046317569732666014, 0.046516223907470705, 0.04638412857055664, 0.04650495910644531, 0.04644454574584961, 0.046458881378173826, 0.04602982330322265, 0.04636876678466797, 0.046475265502929686, 0.046412799835205076, 0.046519294738769534, 0.046527488708496094, 0.04621311950683594, 0.046467071533203126, 0.04649369430541992, 0.04691763305664062, 0.04690124893188476, 0.04652544021606445, 0.046150657653808595, 0.04600428771972656, 0.04650182342529297, 0.04653055953979492, 0.04657766342163086, 0.04649267196655273, 0.04682963180541992, 0.046623680114746095, 0.04650495910644531, 0.04646809768676758, 0.046461952209472655, 0.047339519500732424, 0.048694271087646485, 0.04841471862792969, 0.04851200103759765, 0.04836249542236328, 0.047900672912597655, 0.04835948944091797, 0.048065471649169925, 0.04837171173095703, 0.04834201431274414, 0.04732108688354492, 0.047524864196777344, 0.0466063346862793, 0.04696985626220703, 0.047857662200927735, 0.048399360656738284, 0.048811008453369144, 0.047870975494384765, 0.04825497436523438, 0.048231422424316404, 0.04836454391479492, 0.18703053283691407, 0.04863692855834961, 0.048642047882080076, 0.04825190353393555, 0.048363521575927736, 0.04759347152709961, 0.048571392059326174, 0.048143360137939455, 0.04746547317504883, 0.04823654556274414, 0.048881664276123046, 0.048694271087646485, 0.04863692855834961, 0.04817715072631836, 0.04658790588378906, 0.04653055953979492, 0.04757913589477539, 0.0470302734375, 0.04621004867553711, 0.04739891052246094, 0.04717567825317383, 0.047252479553222655, 0.04823040008544922, 0.04765497589111328, 0.04656838226318359, 0.04729753494262695, 0.04845977783203125, 0.04829695892333984, 0.04857958221435547, 0.048396289825439455, 0.048353279113769534, 0.04774604797363281, 0.04651724624633789, 0.0465428466796875, 0.04867177581787109, 0.04863484954833985, 0.046604286193847655, 0.047421440124511716, 0.047592449188232425, 0.046993408203125, 0.04728115081787109, 0.0483164176940918, 0.04786175918579102, 0.04827238464355469, 0.04739788818359375, 0.04658892822265625, 0.047234046936035154, 0.04739891052246094, 0.046611457824707034, 0.04801126480102539, 0.048333824157714846, 0.04657664108276367, 0.04751564788818359, 0.0487014389038086, 0.048292865753173826, 0.04835635375976562, 0.04862259292602539, 0.048143360137939455, 0.049186817169189455, 0.04864614486694336, 0.04837887954711914, 0.04873011016845703, 0.05090409469604492, 0.1834936065673828, 0.04677836990356445, 0.046693374633789066, 0.046698497772216796, 0.046824447631835936, 0.04659302520751953, 0.04657561492919922, 0.046635009765625, 0.04619468688964844, 0.04654694366455078, 0.048099327087402347, 0.04747673416137695, 0.04760268783569336, 0.04588032150268555, 0.04759347152709961, 0.048570369720458986, 0.048827392578125, 0.047435840606689456, 0.047363006591796875, 0.047933441162109375, 0.04711936187744141, 0.04856524658203125, 0.047963134765625, 0.04738457489013672, 0.04847923278808594, 0.0484771842956543, 0.04849356842041016, 0.04822220611572266, 0.047645694732666014, 0.04873932647705078, 0.04708966445922851, 0.04839321517944336, 0.048563201904296874, 0.04717977523803711, 
0.046927871704101565, 0.046620670318603515, 0.046639102935791016, 0.04680089569091797, 0.047659008026123044, 0.047280128479003904, 0.05217792129516602, 0.04737740707397461, 0.0483133430480957, 0.04905779266357422, 0.04962508773803711, 0.04874444961547852, 0.048589824676513675, 0.04659814453125, 0.04643532943725586, 0.04657664108276367, 0.04866457748413086, 0.04680908966064453, 0.04842291259765625, 0.047442943572998046, 0.04648448181152344, 0.04641689682006836, 0.047323135375976565, 0.048643070220947264, 0.04859187316894531, 0.0487823371887207, 0.047777793884277345, 0.04702310562133789, 0.04630220794677734, 0.1877821502685547, 0.04894412612915039, 0.04664115142822266, 0.04641177749633789, 0.04643430328369141, 0.04662579345703125, 0.046532608032226565, 0.04658790588378906, 0.04613836669921875, 0.046475265502929686, 0.046382080078125, 0.046450687408447267, 0.04670873641967774, 0.04652134323120117, 0.046268417358398435, 0.045902847290039066, 0.046025726318359376, 0.04748185729980469, 0.04684902572631836, 0.046461952209472655, 0.047372287750244144, 0.04662169647216797, 0.04873625564575195, 0.04875161743164062, 0.04831545639038086, 0.04846995162963867, 0.046698497772216796, 0.04655411148071289, 0.04661248016357422, 0.04654079818725586, 0.04655001449584961, 0.046561279296875, 0.04655411148071289, 0.04647423934936523, 0.04652339172363281, 0.0464824333190918, 0.04624998474121094, 0.04660940933227539, 0.04664524841308594, 0.04669747161865234, 0.04654489517211914, 0.04839321517944336, 0.04714393615722656, 0.046601215362548826, 0.048126976013183595, 0.048192512512207034, 0.046617599487304685, 0.04764876937866211, 0.04862464141845703, 0.048363521575927736, 0.04870355224609375, 0.04846380615234375, 0.048282623291015625, 0.04703539276123047, 0.047862785339355465, 0.047541248321533204, 0.04650393676757812, 0.046617599487304685, 0.046676990509033206, 0.048495616912841794, 0.04905779266357422, 0.04838915252685547, 0.046614494323730465, 0.18333287048339844, 0.04656025695800781, 0.046601215362548826, 0.04660838317871094, 0.04676095962524414, 0.04911718368530273, 0.04858879852294922, 0.04851923370361328, 0.049417152404785156, 0.04852633666992188, 0.048494590759277346, 0.04827033615112305, 0.050447425842285155, 0.04946220779418945, 0.04804710388183594, 0.0470384635925293, 0.048968704223632815, 0.0485928955078125, 0.047710208892822265, 0.04654079818725586, 0.04716032028198242, 0.04854988861083984, 0.048535552978515625, 0.04889395141601562, 0.048568321228027345, 0.04860927963256836, 0.048935935974121096, 0.04853964614868164, 0.04840755081176758, 0.04842905426025391, 0.04652134323120117, 0.04660326385498047, 0.047483905792236325, 0.04876697540283203, 0.04867993545532227, 0.04855295944213867, 0.048557056427001956, 0.04691046524047852, 0.04793552017211914, 0.04787401580810547, 0.04848332977294922, 0.04837273788452148, 0.048336894989013675, 0.048418815612792966, 0.04837376022338867, 0.047164417266845705, 0.04696780776977539, 0.04809318542480469, 0.04653875350952148, 0.046362686157226565, 0.04724319839477539, 0.04828672027587891, 0.04692582321166992, 0.04737945556640625, 0.048061439514160156, 0.04743270492553711, 0.047702014923095705, 0.048449569702148435, 0.04834406280517578, 0.048519134521484375, 0.04654182434082031, 0.04656639862060547, 0.047287296295166016, 0.18399539184570313, 0.046808063507080076, 0.04854272079467774, 0.048263168334960936, 0.04669852828979492, 0.04928508758544922, 0.046868480682373044, 0.046581760406494144, 0.046752769470214846, 0.05028966522216797, 0.048942081451416014, 0.0466247673034668, 
0.04674150466918945, 0.046622718811035156, 0.046601215362548826, 0.04654489517211914, 0.04664012908935547, 0.04781158447265625, 0.04701696014404297, 0.04669235229492188, 0.04635340881347656, 0.04624076843261719, 0.0465797119140625, 0.04681216049194336, 0.04678041458129883, 0.04680908966064453, 0.048568321228027345, 0.04837887954711914, 0.048671745300292966, 0.04843622589111328, 0.04868096160888672, 0.04844339370727539, 0.04698316955566406, 0.04852121734619141, 0.046637054443359374, 0.04786380767822265, 0.04649683380126953, 0.04812998580932617, 0.0482979850769043, 0.046993408203125, 0.04710297775268555, 0.04897587203979492, 0.04673535919189453, 0.046611457824707034, 0.046875648498535157, 0.046709758758544925, 0.04660940933227539, 0.046548992156982424, 0.04666777420043945, 0.046676990509033206, 0.046685184478759766, 0.04659609603881836, 0.046706687927246096, 0.04922675323486328, 0.04920729446411133, 0.04852019119262695, 0.04854272079467774, 0.048086017608642576, 0.046620670318603515, 0.0466063346862793, 0.046553150177001956, 0.046623680114746095, 0.04688281631469727]",tokens/s,20.319138972893636,,,main,False,False,True
@@ -8274,7 +8274,7 @@ Traceback (most recent call last):
  hf_raise_for_status(response)
  File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/utils/_errors.py"", line 367, in hf_raise_for_status
  raise HfHubHTTPError(message, response=response) from e
- huggingface_hub.utils._errors.HfHubHTTPError: (Request ID: Root=1-6678c309-22e653cb0ec1950f370237a3;f889ec45-51da-4628-835e-425360ee2e86)
 
  403 Forbidden: Please enable access to public gated repositories in your fine-grained token settings to view this repository..
  Cannot access content at: https://huggingface.co/google/recurrentgemma-2b/resolve/main/config.json.
@@ -8427,7 +8427,7 @@ ChildProcessError: Traceback (most recent call last):
  cls._check_and_enable_flash_attn_2(
  File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 1560, in _check_and_enable_flash_attn_2
  raise ValueError(
- ValueError: CodeGenForCausalLM does not support Flash Attention 2.0 yet. Please request to add support where the model is hosted, on its model hub page: https://huggingface.co//tmp/tmp9ofnhuz3/no_weights_model/discussions/new or in the Transformers GitHub repo: https://github.com/huggingface/transformers/issues/new
 
  ",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,True
  4bit-bnb-flash_attention_2,pytorch,2.3.1+cu121,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,google/recurrentgemma-7b,google/recurrentgemma-7b,cuda,0,42,,,True,True,True,True,,float16,True,False,,flash_attention_2,,False,,False,forward,bnb,0.0,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, AMD EPYC 7R32,16,66697.29792,Linux,x86_64,Linux-5.10.218-208.862.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['NVIDIA A10G'],1,24146608128,0.2.1,,4.41.2,,0.31.0,,,,1.20.0,,,,0.11.1,,"Traceback (most recent call last):
@@ -8469,7 +8469,7 @@ Traceback (most recent call last):
  hf_raise_for_status(response)
  File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/utils/_errors.py"", line 352, in hf_raise_for_status
  raise RepositoryNotFoundError(message, response) from e
- huggingface_hub.utils._errors.RepositoryNotFoundError: 404 Client Error. (Request ID: Root=1-6678c331-1cc139af39355ebf5ce26726;04995a01-c051-4b0b-9b39-b2dce21b8595)
 
  Repository Not Found for url: https://huggingface.co/google/recurrentgemma-7b/resolve/main/config.json.
  Please make sure you specified the correct `repo_id` and `repo_type`.
@@ -8530,7 +8530,7 @@ Traceback (most recent call last):
  hf_raise_for_status(response)
  File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/utils/_errors.py"", line 367, in hf_raise_for_status
  raise HfHubHTTPError(message, response=response) from e
- huggingface_hub.utils._errors.HfHubHTTPError: (Request ID: Root=1-6678c2b5-7d097a1c079600d626ab085b;56d918d5-2577-4638-a35e-ab89548990b6)
 
  403 Forbidden: Please enable access to public gated repositories in your fine-grained token settings to view this repository..
  Cannot access content at: https://huggingface.co/google/gemma-2b/resolve/main/config.json.
@@ -8708,7 +8708,7 @@ Traceback (most recent call last):
  hf_raise_for_status(response)
  File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/utils/_errors.py"", line 367, in hf_raise_for_status
  raise HfHubHTTPError(message, response=response) from e
- huggingface_hub.utils._errors.HfHubHTTPError: (Request ID: Root=1-6678c2df-5280a03a7392afe24475e1eb;822a10c0-dd84-4273-90af-ae5c74919e00)
 
  403 Forbidden: Please enable access to public gated repositories in your fine-grained token settings to view this repository..
  Cannot access content at: https://huggingface.co/google/gemma-7b/resolve/main/config.json.
@@ -8958,7 +8958,7 @@ ChildProcessError: Traceback (most recent call last):
  cls._check_and_enable_flash_attn_2(
  File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 1560, in _check_and_enable_flash_attn_2
  raise ValueError(
- ValueError: XGLMForCausalLM does not support Flash Attention 2.0 yet. Please request to add support where the model is hosted, on its model hub page: https://huggingface.co//tmp/tmprxcmyg_5/no_weights_model/discussions/new or in the Transformers GitHub repo: https://github.com/huggingface/transformers/issues/new
 
  ",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,True
  4bit-bnb-flash_attention_2,pytorch,2.3.1+cu121,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,x,x,cuda,0,42,,,True,True,True,True,,float16,True,False,,flash_attention_2,,False,,False,forward,bnb,0.0,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, AMD EPYC 7R32,16,66697.29792,Linux,x86_64,Linux-5.10.218-208.862.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['NVIDIA A10G'],1,24146608128,0.2.1,,4.41.2,,0.31.0,,,,1.20.0,,,,0.11.1,,"Traceback (most recent call last):
@@ -9280,7 +9280,7 @@ Traceback (most recent call last):
  hf_raise_for_status(response)
  File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/utils/_errors.py"", line 367, in hf_raise_for_status
  raise HfHubHTTPError(message, response=response) from e
- huggingface_hub.utils._errors.HfHubHTTPError: (Request ID: Root=1-6678c086-3e8973913f6f29c17a2854ef;337a287e-4ecf-4ad8-882b-6194ecb4c98e)
 
  403 Forbidden: Please enable access to public gated repositories in your fine-grained token settings to view this repository..
  Cannot access content at: https://huggingface.co/databricks/dbrx-base/resolve/main/config.json.
@@ -9470,7 +9470,7 @@ ChildProcessError: Traceback (most recent call last):
  cls._check_and_enable_flash_attn_2(
  File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 1560, in _check_and_enable_flash_attn_2
  raise ValueError(
- ValueError: XGLMForCausalLM does not support Flash Attention 2.0 yet. Please request to add support where the model is hosted, on its model hub page: https://huggingface.co//tmp/tmpmbjm50v1/no_weights_model/discussions/new or in the Transformers GitHub repo: https://github.com/huggingface/transformers/issues/new
 
  ",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,True
  4bit-bnb-flash_attention_2,pytorch,2.3.0+cu121,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,TencentARC/Mistral_Pro_8B_v0.1,,cuda,0,42,,,,,True,True,,float16,True,False,,flash_attention_2,,False,,False,forward,bnb,0.0,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,,spawn, AMD EPYC 7R32,16,66697.29792,Linux,x86_64,Linux-5.10.215-203.850.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['NVIDIA A10G'],1,24146608128,0.2.0,,4.40.2,,0.30.1,,,,1.19.2,,,,0.10.0,,,MB,2088.595456,7149.715456,0.0,6503.268352,6130.0736,s,10,4.354886688232422,0.43548866882324216,0.0009500705262691482,0.43506301879882814,0.4360664276123047,0.43712642974853516,0.4379744314575195,"[0.43818643188476564, 0.4357357177734375, 0.43495596313476564, 0.43503363037109377, 0.43495343017578125, 0.4350522766113281, 0.43583087158203127, 0.43494912719726564, 0.4350737609863281, 0.435115478515625]",tokens/s,587.8453753842819,kWh,5.1466148040720816e-06,2.82011353963595e-06,3.132101539495727e-05,3.92877437386653e-05,tokens/kWh,6516027.026211126,MB,2088.595456,7149.715456,0.0,6503.268352,6130.07616,s,10,52.50473486328125,5.250473486328125,0.09201662696971703,5.295069091796875,5.3397248046875,5.347057958984375,5.352924482421875,"[5.33809521484375, 5.3243525390625, 5.32192578125, 5.2103232421875, 5.2842451171875, 5.35439111328125, 5.30589306640625, 5.11702490234375, 5.11619873046875, 5.13228515625]",tokens/s,11.998917843133139,kWh,6.21844898056293e-05,3.408169959179007e-05,0.00014644368237224062,0.00024270987176965998,tokens/kWh,259569.1701398498,,s,629,56.34009081268308,0.08957089159409079,0.05164629418121464,0.08460291290283203,0.08540467224121094,0.08579256439208983,0.5164155053710938,"[0.0808826904296875, 0.08587264251708984, 0.08512409973144532, 0.08462745666503907, 0.083346435546875, 0.08475443267822266, 0.08069427490234375, 0.08058879852294921, 0.08235929870605468, 0.08576921844482421, 0.08510054779052735, 0.0855224609375, 0.08536675262451172, 0.08540262603759766, 0.085212158203125, 0.08498175811767578, 0.08550911712646485, 0.08472882843017578, 0.0831272964477539, 0.08487731170654297, 0.08553369903564453, 0.08621772766113281, 0.08491315460205077, 0.08398233795166016, 0.08506777954101563, 0.08603961944580078, 0.08499603271484375, 0.0849438705444336, 0.08485478210449218, 0.08516095733642579, 0.08507596588134765, 0.08500531005859376, 0.08534015655517578, 0.08566169738769532, 0.0850708465576172, 0.08518041229248047, 0.08493363189697266, 0.08562995147705078, 0.08498790740966797, 0.08498585510253906, 0.08526950073242187, 0.08520499420166015, 0.08501145935058593, 0.08523878479003906, 0.082661376953125, 0.0845854721069336, 0.08474726104736328, 0.0852326431274414, 0.08525414276123047, 0.08547122955322266, 0.08509030151367188, 0.08530841827392578, 0.08540364837646484, 0.08494489288330079, 0.08497561645507813, 0.08481587219238282, 0.08475341033935548, 0.08502579498291016, 0.08509542083740235, 0.08502067565917969, 0.08512102508544922, 0.08473705291748047, 0.5177200317382813, 0.08092364501953125, 0.08514765167236328, 0.0842577896118164, 0.08333106994628907, 0.08667545318603516, 0.08635289764404297, 0.08515583801269531, 0.08509849548339844, 0.0846909408569336, 0.08494908905029297, 0.08473385620117188, 0.08456703948974609, 0.08469811248779296, 0.08491417694091796, 0.08470425415039062, 0.080932861328125, 0.08101580810546875, 0.08081203460693359, 
0.08047824096679687, 0.08428540802001953, 0.0845998077392578, 0.08174591827392579, 0.08479027557373046, 0.08491417694091796, 0.08499625396728516, 0.08498262023925782, 0.08467250823974609, 0.08499609375, 0.0854128646850586, 0.08483328247070313, 0.08498381042480468, 0.08468889617919922, 0.08487423706054688, 0.08468582153320313, 0.0848015365600586, 0.0852674560546875, 0.08585830688476563, 0.08500326538085938, 0.08540876770019531, 0.08543743896484375, 0.08685164642333984, 0.08631084442138671, 0.08498073577880859, 0.08495001220703124, 0.08483430480957031, 0.08489369964599609, 0.08494694519042968, 0.08446463775634766, 0.08407654571533203, 0.08470425415039062, 0.08453324890136718, 0.08203366088867188, 0.08531148529052734, 0.08482201385498046, 0.08499097442626953, 0.08538829040527343, 0.08473804473876953, 0.08488960266113281, 0.08508415985107422, 0.08515789031982422, 0.08494287872314453, 0.08479126739501953, 0.5212528686523438, 0.084890625, 0.08508108520507812, 0.08611840057373046, 0.08516403198242188, 0.08458956909179688, 0.08463667297363281, 0.08557878112792969, 0.08558902740478516, 0.08462841796875, 0.08482508850097656, 0.08524390411376953, 0.0847267837524414, 0.08469811248779296, 0.08470425415039062, 0.08479539489746094, 0.08464895629882813, 0.08452607727050782, 0.08485379028320313, 0.08477590179443359, 0.08533299255371093, 0.08490598297119141, 0.0849090576171875, 0.08484352111816407, 0.08475955200195312, 0.080753662109375, 0.08075059509277344, 0.08288256072998047, 0.08464486694335938, 0.08470015716552734, 0.08492134094238281, 0.08503501129150391, 0.08485887908935547, 0.08471449279785156, 0.084569091796875, 0.08482099151611328, 0.08460594940185547, 0.08432537841796875, 0.08461209869384766, 0.0847984619140625, 0.0846192626953125, 0.08486605072021484, 0.08466534423828125, 0.0845301742553711, 0.08466636657714843, 0.08366899108886719, 0.08472576141357421, 0.08461824035644532, 0.08455577850341797, 0.08461619567871094, 0.08433561706542969, 0.0844554214477539, 0.08434893035888671, 0.08446463775634766, 0.08080691528320312, 0.08115507507324218, 0.08446771240234376, 0.08471449279785156, 0.084748291015625, 0.08502272033691406, 0.08421692657470703, 0.08310678100585937, 0.08451372528076172, 0.5164462280273437, 0.08081817626953125, 0.08075878143310547, 0.08023859405517578, 0.08031231689453125, 0.0796651840209961, 0.08062461090087891, 0.08074342346191406, 0.08057036590576172, 0.08049049377441406, 0.08030207824707031, 0.08085913848876954, 0.08074854278564453, 0.08060313415527344, 0.0807567367553711, 0.08064617919921875, 0.08057341003417968, 0.0805775375366211, 0.08061542510986328, 0.08045158386230469, 0.08406835174560547, 0.08073932647705079, 0.08241868591308593, 0.08514765167236328, 0.08534220886230469, 0.08399565124511718, 0.08037580871582031, 0.08123699188232422, 0.08072294616699219, 0.08092572784423828, 0.08314777374267578, 0.08090415954589844, 0.08274329376220703, 0.08470527648925781, 0.0807874526977539, 0.08084786987304687, 0.08369868469238281, 0.08458444976806641, 0.08475750732421874, 0.08480563354492188, 0.08099635314941406, 0.08079974365234376, 0.08065535736083984, 0.08275865936279297, 0.08473603057861329, 0.08466326141357422, 0.0848384017944336, 0.08517120361328125, 0.08535552215576171, 0.08433663940429688, 0.08464588928222656, 0.08481587219238282, 0.08481484985351563, 0.08510975646972656, 0.08468275451660157, 0.0861143035888672, 0.08548454284667968, 0.08472064208984376, 0.08493055725097656, 0.08681574249267578, 0.08619725036621094, 0.08521932983398438, 0.08474726104736328, 0.5163919067382813, 
0.08359117126464843, 0.08464691162109375, 0.08525004577636719, 0.08071167755126953, 0.0807608642578125, 0.08082121276855468, 0.08076799774169922, 0.08060006713867188, 0.08175001525878907, 0.08488243103027343, 0.08472166442871094, 0.08394445037841797, 0.08496435546875, 0.0847267837524414, 0.08517120361328125, 0.08457526397705079, 0.08479126739501953, 0.08574156951904296, 0.08485990142822265, 0.08481382751464844, 0.08460291290283203, 0.08464790344238281, 0.08110489654541016, 0.08444927978515625, 0.08513843536376953, 0.08490290832519531, 0.08127078247070313, 0.08087654113769531, 0.08389324951171875, 0.0847083511352539, 0.08498381042480468, 0.08485068511962891, 0.08423526763916016, 0.08490918731689454, 0.08473484802246094, 0.08484044647216797, 0.08504319763183593, 0.0810618896484375, 0.0811878433227539, 0.08054271697998047, 0.0806123504638672, 0.0807034912109375, 0.08408370971679688, 0.08534937286376953, 0.08435711669921875, 0.08488448333740234, 0.08481177520751954, 0.08471552276611329, 0.08489574432373047, 0.08478822326660156, 0.0851937255859375, 0.08481692504882812, 0.08490595245361328, 0.08488140869140624, 0.08486809539794922, 0.08466534423828125, 0.08551526641845703, 0.0852490234375, 0.08489266967773437, 0.08475852966308593, 0.08525721740722657, 0.0845660171508789, 0.5165946655273438, 0.08096256256103515, 0.08067686462402343, 0.08383283233642579, 0.08545587158203125, 0.08592691040039062, 0.08542105865478515, 0.08580812835693359, 0.08653107452392578, 0.08479027557373046, 0.08508006286621093, 0.08506777954101563, 0.08509951782226563, 0.08516403198242188, 0.08509235382080078, 0.08480870056152344, 0.08497872161865234, 0.0852776641845703, 0.08520703887939453, 0.08543846130371094, 0.08479027557373046, 0.08502579498291016, 0.08532685089111328, 0.08513433837890624, 0.08513228607177735, 0.0849991683959961, 0.08475341033935548, 0.08500121307373047, 0.08525004577636719, 0.08603238677978516, 0.0857149429321289, 0.08504524993896484, 0.08503193664550782, 0.08063385772705078, 0.08257945251464843, 0.08591462707519532, 0.08676454162597656, 0.08414208221435547, 0.08625151824951172, 0.0863109130859375, 0.085501953125, 0.08678092956542968, 0.08597503662109375, 0.0852490234375, 0.0853780517578125, 0.08530944061279297, 0.08511385345458984, 0.08488550567626953, 0.0850882568359375, 0.08629043579101563, 0.08513536071777343, 0.08496640014648438, 0.08540364837646484, 0.08508108520507812, 0.0853729248046875, 0.08491827392578125, 0.08530226898193359, 0.08532994842529297, 0.08504521942138672, 0.0850882568359375, 0.08540160369873047, 0.08498381042480468, 0.08480668640136718, 0.5231185302734375, 0.08555007934570312, 0.085212158203125, 0.0849991683959961, 0.08495820617675781, 0.08518758392333985, 0.08489881896972656, 0.08519577789306641, 0.08499404907226563, 0.0853544921875, 0.08507087707519531, 0.08468988800048828, 0.0851773452758789, 0.08512409973144532, 0.08484146881103516, 0.08553472137451172, 0.08454454040527344, 0.08496841430664062, 0.08502374267578125, 0.08565964508056641, 0.08568627166748047, 0.0853903350830078, 0.0851230697631836, 0.0851251220703125, 0.0851600341796875, 0.08550185394287109, 0.08520089721679687, 0.08487014770507813, 0.08488038635253906, 0.08486502075195312, 0.08505958557128906, 0.08553062438964844, 0.08553369903564453, 0.08480973052978516, 0.08504524993896484, 0.08505241394042969, 0.08521318054199219, 0.08505753326416016, 0.08548044586181641, 0.08534835052490235, 0.08536576080322265, 0.08545996856689453, 0.08545996856689453, 0.08539647674560547, 0.08540057373046875, 0.08492031860351562, 
0.08551526641845703, 0.08133529663085938, 0.08056217956542969, 0.08036351776123046, 0.08066969299316407, 0.08025395202636719, 0.08039628601074218, 0.08287641906738281, 0.08132608032226563, 0.08082329559326172, 0.08066867065429688, 0.0822999038696289, 0.08375296020507812, 0.08239103698730468, 0.08026112365722657, 0.08034611511230469, 0.08190873718261718, 0.5161922607421875, 0.08048332977294922, 0.08032358551025391, 0.08309555053710938, 0.08317644500732421, 0.0805580825805664, 0.08210636901855468, 0.08081407928466797, 0.08045260620117188, 0.08023040008544922, 0.08245350646972656, 0.08249139404296875, 0.08060006713867188, 0.08049459075927734, 0.07992115020751953, 0.08049977874755859, 0.08055289459228515, 0.08052326202392578, 0.08077516937255859, 0.08043520355224609, 0.0833034210205078, 0.08294092559814453, 0.08160768127441406, 0.08223337554931641, 0.08221385955810546, 0.08159744262695312, 0.08116838073730469, 0.08049049377441406, 0.08033586883544921, 0.08034918212890625, 0.08060214233398437, 0.08028873443603515, 0.08058675384521484, 0.08057036590576172, 0.08030413055419922, 0.08308633422851562, 0.08312934112548828, 0.08227123260498047, 0.08059187316894531, 0.08236550140380859, 0.08107414245605468, 0.08041264343261718, 0.08030416107177735, 0.08019350433349609, 0.0805038070678711, 0.08295935821533203, 0.08059903717041016, 0.0819609603881836, 0.08271769714355469, 0.08190156555175782, 0.08018841552734375, 0.08054988861083984, 0.08026624298095703, 0.08369356536865234, 0.08052531433105468, 0.08051507568359376, 0.08030719757080078, 0.08242793273925782, 0.08046998596191406, 0.08084377288818359, 0.08229580688476562, 0.08067276763916016, 0.08043110656738281, 0.5164246826171875, 0.08162815856933593, 0.08057965087890626, 0.08042387390136718, 0.08143666839599609, 0.08144281768798828, 0.08223232269287109, 0.08032972717285156, 0.0812779541015625, 0.08238285064697265, 0.08033280181884765, 0.08039218902587891, 0.0803594207763672, 0.08050176239013672, 0.0800552978515625, 0.08041165161132813, 0.080468994140625, 0.08084172821044922, 0.08128614044189453, 0.08042393493652343, 0.08206028747558594, 0.08061952209472656, 0.08051507568359376, 0.0803420181274414, 0.08279654693603515, 0.08056729888916016, 0.08061548614501954, 0.080133056640625, 0.08056626892089844, 0.08235724639892578, 0.08163430023193359, 0.08038706970214844, 0.08184012603759766, 0.08037785339355469, 0.0805027847290039, 0.08062770843505859, 0.0814172134399414, 0.08300748443603516, 0.08400902557373047, 0.08350918579101563, 0.08451481628417969, 0.0817254409790039, 0.08179814147949219, 0.08327993774414062, 0.08242066955566406, 0.08332598114013672, 0.08047100830078124, 0.08046387481689453, 0.08042803192138671, 0.08188108825683593, 0.08300032043457031, 0.08036966705322265, 0.08046489715576172, 0.08080178833007813, 0.08087039947509765, 0.08029388427734375, 0.08035430145263672, 0.08065331268310547, 0.08069631958007813, 0.08063385772705078, 0.08074342346191406, 0.08058367919921874, 0.08041881561279297, 0.5169356689453125, 0.080574462890625, 0.08031948852539063, 0.08043929290771484, 0.08171826934814454, 0.0848189468383789, 0.08166194915771484, 0.08131072235107421, 0.08058777618408203, 0.08282316589355469, 0.08184524536132813, 0.08173056030273437, 0.08203263854980469, 0.08320511627197266, 0.08302694702148437, 0.08298188781738282, 0.08223846435546875, 0.08015769958496094, 0.08293170928955078, 0.08339968109130859, 0.08318873596191406, 0.08285183715820313, 0.08314470672607421, 0.0827658233642578, 0.08057651519775391, 0.08049561309814453, 0.08271158599853516, 
0.08238076782226562, 0.08045980834960938, 0.08164553833007812, 0.08335871887207032, 0.08263782501220703, 0.08058477020263671, 0.08046176147460937, 0.08078028869628906, 0.08137318420410156, 0.08054271697998047, 0.08029901123046874, 0.08063180541992188, 0.08216371154785156, 0.08048230743408204, 0.0804874267578125, 0.08042700958251953, 0.08028467559814453, 0.08053555297851563, 0.08054886627197265, 0.08183500671386719, 0.08184320068359376, 0.0805406723022461, 0.08033177947998046, 0.08072908782958985, 0.08007577514648437, 0.08035020446777344, 0.08032051086425782, 0.08077721405029296, 0.08069427490234375, 0.08040857696533203, 0.07999078369140625, 0.08025190734863281, 0.08065843200683594, 0.08293888092041016, 0.08307711791992188, 0.08205107116699219]",tokens/s,11.164341251974012,,,main,False,False,True
@@ -10693,7 +10693,7 @@ Traceback (most recent call last):
  hf_raise_for_status(response)
  File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/utils/_errors.py"", line 367, in hf_raise_for_status
  raise HfHubHTTPError(message, response=response) from e
- huggingface_hub.utils._errors.HfHubHTTPError: (Request ID: Root=1-6678c302-4a1d16227404758f4fb2ab3c;02477c7a-eed6-4df0-809d-b85c11efcd64)
 
  403 Forbidden: Please enable access to public gated repositories in your fine-grained token settings to view this repository..
  Cannot access content at: https://huggingface.co/google/recurrentgemma-2b/resolve/main/config.json.
@@ -10856,7 +10856,7 @@ Traceback (most recent call last):
  hf_raise_for_status(response)
  File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/utils/_errors.py"", line 352, in hf_raise_for_status
  raise RepositoryNotFoundError(message, response) from e
- huggingface_hub.utils._errors.RepositoryNotFoundError: 404 Client Error. (Request ID: Root=1-6678c32b-1565e9de2c3b72002093cf92;12616680-f397-4eed-a58a-eb69fb904373)
 
  Repository Not Found for url: https://huggingface.co/google/recurrentgemma-7b/resolve/main/config.json.
  Please make sure you specified the correct `repo_id` and `repo_type`.
@@ -10917,7 +10917,7 @@ Traceback (most recent call last):
  hf_raise_for_status(response)
  File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/utils/_errors.py"", line 367, in hf_raise_for_status
  raise HfHubHTTPError(message, response=response) from e
- huggingface_hub.utils._errors.HfHubHTTPError: (Request ID: Root=1-6678c2af-04882a0716bb104a2f698069;b3a338c6-b944-4d02-9f21-01eb6fb0ece0)
 
  403 Forbidden: Please enable access to public gated repositories in your fine-grained token settings to view this repository..
  Cannot access content at: https://huggingface.co/google/gemma-2b/resolve/main/config.json.
@@ -11121,7 +11121,7 @@ Traceback (most recent call last):
  hf_raise_for_status(response)
  File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/utils/_errors.py"", line 367, in hf_raise_for_status
  raise HfHubHTTPError(message, response=response) from e
- huggingface_hub.utils._errors.HfHubHTTPError: (Request ID: Root=1-6678c2d8-1bb18c606ad4976b5228fc61;6322c40d-dd44-4377-87a2-f085396af7e6)
 
  403 Forbidden: Please enable access to public gated repositories in your fine-grained token settings to view this repository..
  Cannot access content at: https://huggingface.co/google/gemma-7b/resolve/main/config.json.
@@ -11974,7 +11974,7 @@ Traceback (most recent call last):
  hf_raise_for_status(response)
  File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/utils/_errors.py"", line 367, in hf_raise_for_status
  raise HfHubHTTPError(message, response=response) from e
- huggingface_hub.utils._errors.HfHubHTTPError: (Request ID: Root=1-6678c080-6ad34c52261a189c4110c570;75cc89c5-ef2a-40b0-b73a-2d09fce6fbcb)
 
  403 Forbidden: Please enable access to public gated repositories in your fine-grained token settings to view this repository..
  Cannot access content at: https://huggingface.co/databricks/dbrx-base/resolve/main/config.json.
@@ -13417,7 +13417,7 @@ Traceback (most recent call last):
  hf_raise_for_status(response)
  File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/utils/_errors.py"", line 367, in hf_raise_for_status
  raise HfHubHTTPError(message, response=response) from e
- huggingface_hub.utils._errors.HfHubHTTPError: (Request ID: Root=1-6678c2ed-6c79ed38491b2bdc3bca4a1c;11189be4-8bec-4ba5-b6ee-0243dc64feab)
 
  403 Forbidden: Please enable access to public gated repositories in your fine-grained token settings to view this repository..
  Cannot access content at: https://huggingface.co/google/recurrentgemma-2b/resolve/main/config.json.
@@ -13584,7 +13584,7 @@ Traceback (most recent call last):
  hf_raise_for_status(response)
  File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/utils/_errors.py"", line 352, in hf_raise_for_status
  raise RepositoryNotFoundError(message, response) from e
- huggingface_hub.utils._errors.RepositoryNotFoundError: 404 Client Error. (Request ID: Root=1-6678c316-0009c6dc2961c7724a37d5d5;636dd7fc-3cdb-4061-80ff-7b67be31a8ec)
 
  Repository Not Found for url: https://huggingface.co/google/recurrentgemma-7b/resolve/main/config.json.
  Please make sure you specified the correct `repo_id` and `repo_type`.
@@ -13645,7 +13645,7 @@ Traceback (most recent call last):
  hf_raise_for_status(response)
  File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/utils/_errors.py"", line 367, in hf_raise_for_status
  raise HfHubHTTPError(message, response=response) from e
- huggingface_hub.utils._errors.HfHubHTTPError: (Request ID: Root=1-6678c29a-67b3b3b74b4311b45758843b;aa86b7fd-2e90-425f-8960-903d223391ff)
 
  403 Forbidden: Please enable access to public gated repositories in your fine-grained token settings to view this repository..
  Cannot access content at: https://huggingface.co/google/gemma-2b/resolve/main/config.json.
@@ -13823,7 +13823,7 @@ Traceback (most recent call last):
  hf_raise_for_status(response)
  File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/utils/_errors.py"", line 367, in hf_raise_for_status
  raise HfHubHTTPError(message, response=response) from e
- huggingface_hub.utils._errors.HfHubHTTPError: (Request ID: Root=1-6678c2c3-343a2b93742c552a763b3570;367aaf35-68fa-4494-af90-c94e8affb8ad)
 
  403 Forbidden: Please enable access to public gated repositories in your fine-grained token settings to view this repository..
  Cannot access content at: https://huggingface.co/google/gemma-7b/resolve/main/config.json.
@@ -14339,7 +14339,7 @@ Traceback (most recent call last):
  hf_raise_for_status(response)
  File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/utils/_errors.py"", line 367, in hf_raise_for_status
  raise HfHubHTTPError(message, response=response) from e
- huggingface_hub.utils._errors.HfHubHTTPError: (Request ID: Root=1-6678c06b-31a835334f5fa11755c37b88;ed7f27da-1051-45d4-962e-0ffdd38a1071)
 
  403 Forbidden: Please enable access to public gated repositories in your fine-grained token settings to view this repository..
  Cannot access content at: https://huggingface.co/databricks/dbrx-base/resolve/main/config.json.
 
  hf_raise_for_status(response)
  File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/utils/_errors.py"", line 367, in hf_raise_for_status
  raise HfHubHTTPError(message, response=response) from e
+ huggingface_hub.utils._errors.HfHubHTTPError: (Request ID: Root=1-667a1466-3396c2d6560cd53d6164fae3;15599486-4ec7-418a-9325-a99750665c3f)
 
  403 Forbidden: Please enable access to public gated repositories in your fine-grained token settings to view this repository..
  Cannot access content at: https://huggingface.co/google/recurrentgemma-2b/resolve/main/config.json.
 
  hf_raise_for_status(response)
  File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/utils/_errors.py"", line 352, in hf_raise_for_status
  raise RepositoryNotFoundError(message, response) from e
+ huggingface_hub.utils._errors.RepositoryNotFoundError: 404 Client Error. (Request ID: Root=1-667a1493-3dfb6bba548ab4767a9a2060;90bbf638-28bc-45cc-bf08-ab41b6d66825)
 
  Repository Not Found for url: https://huggingface.co/google/recurrentgemma-7b/resolve/main/config.json.
  Please make sure you specified the correct `repo_id` and `repo_type`.
 
  hf_raise_for_status(response)
  File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/utils/_errors.py"", line 367, in hf_raise_for_status
  raise HfHubHTTPError(message, response=response) from e
+ huggingface_hub.utils._errors.HfHubHTTPError: (Request ID: Root=1-667a1409-5801cd1e6298084a7046c9b6;1ff2c613-51fc-42e6-969d-5e7d0ba16abe)
 
  403 Forbidden: Please enable access to public gated repositories in your fine-grained token settings to view this repository..
  Cannot access content at: https://huggingface.co/google/gemma-2b/resolve/main/config.json.
 
  hf_raise_for_status(response)
  File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/utils/_errors.py"", line 367, in hf_raise_for_status
  raise HfHubHTTPError(message, response=response) from e
+ huggingface_hub.utils._errors.HfHubHTTPError: (Request ID: Root=1-667a143a-025fd49f3074eab656635e11;e2ce13f9-abdb-4cd7-818f-9bd3ffd4d681)
 
  403 Forbidden: Please enable access to public gated repositories in your fine-grained token settings to view this repository..
  Cannot access content at: https://huggingface.co/google/gemma-7b/resolve/main/config.json.
 
  hf_raise_for_status(response)
  File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/utils/_errors.py"", line 367, in hf_raise_for_status
  raise HfHubHTTPError(message, response=response) from e
+ huggingface_hub.utils._errors.HfHubHTTPError: (Request ID: Root=1-667a11d2-69b57fdc45f086621971e625;cc096f9d-1047-4f5f-a1b9-80008a9453e5)
 
  403 Forbidden: Please enable access to public gated repositories in your fine-grained token settings to view this repository..
  Cannot access content at: https://huggingface.co/databricks/dbrx-base/resolve/main/config.json.
 
  hf_raise_for_status(response)
  File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/utils/_errors.py"", line 367, in hf_raise_for_status
  raise HfHubHTTPError(message, response=response) from e
+ huggingface_hub.utils._errors.HfHubHTTPError: (Request ID: Root=1-667a146d-63f1c7e766e0f1705a5c8082;d842abff-2b26-4188-af83-930ebd69caff)
 
  403 Forbidden: Please enable access to public gated repositories in your fine-grained token settings to view this repository..
  Cannot access content at: https://huggingface.co/google/recurrentgemma-2b/resolve/main/config.json.
 
  hf_raise_for_status(response)
  File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/utils/_errors.py"", line 352, in hf_raise_for_status
  raise RepositoryNotFoundError(message, response) from e
+ huggingface_hub.utils._errors.RepositoryNotFoundError: 404 Client Error. (Request ID: Root=1-667a1499-35da756b34d796bf5e9469ad;76b1c40f-2061-45f3-8a60-2aafc401c3d1)
 
  Repository Not Found for url: https://huggingface.co/google/recurrentgemma-7b/resolve/main/config.json.
  Please make sure you specified the correct `repo_id` and `repo_type`.
 
  hf_raise_for_status(response)
  File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/utils/_errors.py"", line 367, in hf_raise_for_status
  raise HfHubHTTPError(message, response=response) from e
+ huggingface_hub.utils._errors.HfHubHTTPError: (Request ID: Root=1-667a1414-4f951c9b7acd78830fcb93bf;59855539-7278-4cf5-bbec-5df6376cb2db)
 
  403 Forbidden: Please enable access to public gated repositories in your fine-grained token settings to view this repository..
  Cannot access content at: https://huggingface.co/google/gemma-2b/resolve/main/config.json.
 
  hf_raise_for_status(response)
  File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/utils/_errors.py"", line 367, in hf_raise_for_status
  raise HfHubHTTPError(message, response=response) from e
+ huggingface_hub.utils._errors.HfHubHTTPError: (Request ID: Root=1-667a1441-65c1eae23f86af274d3ae018;044a4f7d-b0e8-403b-8281-7c8338f51bf4)
 
  403 Forbidden: Please enable access to public gated repositories in your fine-grained token settings to view this repository..
  Cannot access content at: https://huggingface.co/google/gemma-7b/resolve/main/config.json.
 
  hf_raise_for_status(response)
  File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/utils/_errors.py"", line 367, in hf_raise_for_status
  raise HfHubHTTPError(message, response=response) from e
+ huggingface_hub.utils._errors.HfHubHTTPError: (Request ID: Root=1-667a11d9-4ce4c44e196916db21729cdb;c2b56de1-3d0c-4d31-a2bc-32ba2ae9b50d)
 
  403 Forbidden: Please enable access to public gated repositories in your fine-grained token settings to view this repository..
  Cannot access content at: https://huggingface.co/databricks/dbrx-base/resolve/main/config.json.
 
  cls._check_and_enable_flash_attn_2(
  File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 1560, in _check_and_enable_flash_attn_2
  raise ValueError(
+ ValueError: XGLMForCausalLM does not support Flash Attention 2.0 yet. Please request to add support where the model is hosted, on its model hub page: https://huggingface.co//tmp/tmpnx72l9l8/no_weights_model/discussions/new or in the Transformers GitHub repo: https://github.com/huggingface/transformers/issues/new
 
  ",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,
  8bit-bnb-flash_attention_2,pytorch,2.3.1+cu121,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,tiiuae/falcon-7b,tiiuae/falcon-7b,cuda,0,42,,,True,True,True,True,,float16,True,False,,flash_attention_2,,False,,False,forward,bnb,0.0,True,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, AMD EPYC 7R32,16,66697.29792,Linux,x86_64,Linux-5.10.218-208.862.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['NVIDIA A10G'],1,24146608128,0.2.1,,4.41.2,,0.31.0,,,,1.20.0,,,,0.11.1,,"Traceback (most recent call last):
 
  cls._check_and_enable_flash_attn_2(
  File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 1560, in _check_and_enable_flash_attn_2
  raise ValueError(
+ ValueError: CodeGenForCausalLM does not support Flash Attention 2.0 yet. Please request to add support where the model is hosted, on its model hub page: https://huggingface.co//tmp/tmp7a4kx4lh/no_weights_model/discussions/new or in the Transformers GitHub repo: https://github.com/huggingface/transformers/issues/new
 
  ",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,
  8bit-bnb-flash_attention_2,pytorch,2.3.0+cu121,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,EleutherAI/pythia-2.7b,,cuda,0,42,,,,,True,True,,float16,True,False,,flash_attention_2,,False,,False,forward,bnb,0.0,True,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,,spawn, AMD EPYC 7R32,16,66697.29792,Linux,x86_64,Linux-5.10.215-203.850.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['NVIDIA A10G'],1,24146608128,0.2.0,217063f5c507ed7cc255df7e1f64c4333a0b4dfe,4.40.2,,0.30.1,,,,1.19.2,,,,0.10.0,,,MB,1753.206784,4370.989056,0.0,3716.153344,3502.931968,s,10,0.7158005905151366,0.07158005905151367,0.0026151097512319512,0.07056062698364257,0.07317087783813477,0.0761810390472412,0.07858916801452637,"[0.07919120025634765, 0.07085072326660156, 0.07094694519042968, 0.07031388854980469, 0.07023776245117187, 0.07029948425292969, 0.07049616241455078, 0.072501953125, 0.07033737945556641, 0.07062509155273437]",tokens/s,3576.4150434098656,kWh,8.388341447797885e-07,4.5958770674104095e-07,1.8877674676594226e-06,3.1861893191802524e-06,tokens/kWh,80346763.5959134,MB,1753.206784,4370.989056,0.0,3716.153344,3632.817664,s,10,44.667627929687505,4.46676279296875,0.018600767003160216,4.460619384765625,4.485654638671876,4.498310229492188,4.5084347021484374,"[4.48284228515625, 4.4655009765625, 4.45241162109375, 4.45341796875, 4.44769091796875, 4.4522490234375, 4.481310546875, 4.4602392578125, 4.46099951171875, 4.5109658203125]",tokens/s,14.104174078634749,kWh,5.310124745159473e-05,2.9102769588021433e-05,9.395025364714023e-05,0.0001761542706867564,tokens/kWh,357641.05947807967,,s,629,45.231303550720156,0.07190986256076345,0.008457534828436213,0.07048806762695313,0.07254323120117187,0.07291353302001953,0.14091268249511718,"[0.07149763488769531, 0.07236300659179687, 0.07048397064208985, 0.07265382385253906, 0.07177318572998047, 0.07225138854980469, 0.07384473419189454, 0.07193087768554687, 0.07062732696533203, 0.07043583679199218, 0.070434814453125, 0.07033241271972657, 0.07061817932128907, 0.07013779449462891, 0.07195852661132812, 0.07246438598632812, 0.07255142211914062, 0.07255961608886718, 0.07078912353515625, 0.07081983947753906, 0.07044915008544922, 0.07034265899658203, 0.07060787200927734, 0.07021568298339843, 0.07060275268554687, 0.07041433715820312, 0.07039282989501953, 0.07039385223388672, 0.07066316986083984, 0.07061299133300782, 0.07034162902832031, 0.07017164611816407, 0.07033654022216797, 0.07016553497314452, 0.0703917465209961, 0.07013990020751953, 0.07160626983642578, 0.07050240325927734, 0.07033548736572266, 0.07311052703857422, 0.07278694152832031, 0.07141990661621093, 0.07023411560058594, 0.07154585266113281, 0.07023104095458985, 0.07205171203613281, 0.07074508666992188, 0.07263231658935547, 0.07006617736816406, 0.07114649963378906, 0.07070105743408203, 0.07260057830810547, 0.07075430297851562, 0.07085670471191406, 0.07270502471923829, 0.0717649917602539, 0.070434814453125, 0.07098060607910156, 0.07048089599609375, 0.07204761505126953, 0.07027814483642578, 0.07079833221435547, 0.14196018981933595, 0.07093965148925781, 0.07223808288574218, 0.0704194564819336, 0.07073382568359375, 0.0712837142944336, 0.07221247863769531, 0.07236198425292968, 0.0719656982421875, 0.07015936279296875, 0.07168716430664063, 0.07038979339599609, 0.07106658935546875, 0.0719288330078125, 0.07191756439208985, 0.07084031677246094, 
0.07375462341308593, 0.07192269134521484, 0.07429017639160156, 0.07091712188720703, 0.07006105804443359, 0.0702371826171875, 0.07047277069091797, 0.0703354263305664, 0.07038566589355469, 0.07033856201171874, 0.07036927795410156, 0.07030681610107421, 0.07035903930664063, 0.07051980590820313, 0.0704532470703125, 0.07031910705566406, 0.07042253112792969, 0.0697681884765625, 0.07008972930908203, 0.07012351989746093, 0.06991155242919922, 0.07039590454101563, 0.0702730255126953, 0.0704368667602539, 0.0709191665649414, 0.07057107543945312, 0.07038355255126953, 0.06974566650390625, 0.07057408142089844, 0.07198207855224609, 0.07142912292480469, 0.07038668823242188, 0.07020134735107422, 0.07028018951416015, 0.0721817626953125, 0.07068876647949218, 0.0739788818359375, 0.07211827087402344, 0.07048703765869141, 0.07026073455810547, 0.07033856201171874, 0.06981843566894531, 0.07036307525634766, 0.07035903930664063, 0.07015936279296875, 0.0702525405883789, 0.07032115173339844, 0.14248550415039063, 0.0704532470703125, 0.07049625396728515, 0.07205888366699219, 0.07040105438232422, 0.07024227142333984, 0.0702730255126953, 0.0700549087524414, 0.07016038513183594, 0.07020751953125, 0.07015318298339844, 0.07036006164550782, 0.07208345794677734, 0.07120384216308594, 0.07030886077880859, 0.07037747192382812, 0.07025459289550781, 0.07048806762695313, 0.07146086120605469, 0.07031705474853515, 0.07024639892578124, 0.07025357055664062, 0.07015628814697265, 0.07031603240966797, 0.07018701171875, 0.07035289764404297, 0.07062220764160156, 0.07034982299804687, 0.07380480194091797, 0.07419801330566406, 0.07048499298095703, 0.07023104095458985, 0.07022489929199219, 0.07049625396728515, 0.07031910705566406, 0.06968934631347656, 0.07044915008544922, 0.07046144104003907, 0.07019007873535156, 0.0704931869506836, 0.07031504058837891, 0.07061500549316406, 0.07025459289550781, 0.06980095672607421, 0.07020543670654297, 0.07057920074462891, 0.07047270202636718, 0.07049215698242188, 0.07036313629150391, 0.0703969955444336, 0.07029548645019532, 0.07073689270019531, 0.07023206329345703, 0.07331123352050781, 0.07239373016357421, 0.07279411315917969, 0.07025049591064453, 0.07048601531982422, 0.07033139038085938, 0.07038771057128906, 0.07031398773193359, 0.07084646606445312, 0.07017683410644532, 0.1412003173828125, 0.07280127716064454, 0.07291295623779297, 0.07079011535644532, 0.07045833587646484, 0.06992281341552735, 0.07036006164550782, 0.07027712249755859, 0.07034368133544922, 0.070329345703125, 0.07038361358642578, 0.07045938873291016, 0.07043276977539062, 0.07054847717285156, 0.07040716552734375, 0.07084953308105468, 0.07044198608398437, 0.07054541015625, 0.07037337493896484, 0.07072358703613281, 0.07076454162597656, 0.07148953247070312, 0.07052082824707032, 0.0703795166015625, 0.0707430419921875, 0.07142809295654297, 0.07075020599365234, 0.07038259124755859, 0.07049215698242188, 0.07050035095214843, 0.07211110687255859, 0.07184690856933594, 0.07365119934082032, 0.0705771484375, 0.06972825622558594, 0.0703672332763672, 0.07038566589355469, 0.07066214752197265, 0.07053619384765625, 0.07096217346191407, 0.07095398712158203, 0.0704368667602539, 0.07037849426269531, 0.07100518035888671, 0.07086080169677735, 0.07033446502685547, 0.07027609252929687, 0.07033446502685547, 0.07015017700195313, 0.07036412811279297, 0.07049420928955077, 0.07031603240966797, 0.07026585388183594, 0.07072870635986328, 0.07130521392822266, 0.07034572601318359, 0.07048703765869141, 0.07038976287841797, 0.07045529937744141, 0.07031807708740234, 
0.07038566589355469, 0.07027097320556641, 0.07006105804443359, 0.14092185974121094, 0.0704901123046875, 0.07320371246337891, 0.07042150115966797, 0.07148339080810547, 0.07050240325927734, 0.06999040222167968, 0.07028530883789062, 0.0701317138671875, 0.07035596466064453, 0.07015526580810547, 0.07016550445556641, 0.07022592163085938, 0.07046144104003907, 0.07004672241210938, 0.07037337493896484, 0.07001395416259766, 0.07048703765869141, 0.07069593811035156, 0.0704676513671875, 0.07074195098876954, 0.0702485122680664, 0.0703180160522461, 0.07263129425048828, 0.07168307495117188, 0.07106047821044922, 0.07031193542480468, 0.0709939193725586, 0.07001087951660157, 0.0700231704711914, 0.06932275390625, 0.0694999008178711, 0.07037133026123046, 0.07136972808837891, 0.06993408203125, 0.06980403137207031, 0.07046963500976562, 0.07041843414306641, 0.07022796630859375, 0.07048397064208985, 0.07091302490234375, 0.07066422271728516, 0.07354160308837891, 0.07238349151611329, 0.07298252868652344, 0.07026687622070313, 0.07172300720214844, 0.07047782135009766, 0.07016754913330078, 0.07050137329101562, 0.07046348571777344, 0.0704194564819336, 0.07021363067626953, 0.07042662048339844, 0.07023513793945313, 0.07038361358642578, 0.07011328125, 0.07033245086669922, 0.07018902587890626, 0.07036927795410156, 0.06995763397216796, 0.07021363067626953, 0.07022182464599609, 0.14130278015136719, 0.07017369842529297, 0.07012454223632812, 0.07298764801025391, 0.07130521392822266, 0.07037849426269531, 0.070181884765625, 0.07114342498779297, 0.07037644958496093, 0.07040921783447265, 0.07054438018798828, 0.07054847717285156, 0.07034060668945312, 0.0706529312133789, 0.07021977233886718, 0.0706529312133789, 0.07132982635498047, 0.07049520111083984, 0.0703672332763672, 0.07049113464355469, 0.07051776123046875, 0.07055667114257813, 0.07032627105712891, 0.07019929504394531, 0.07023411560058594, 0.0705638427734375, 0.0705955810546875, 0.07053311920166015, 0.07051264190673828, 0.07064784240722656, 0.07040406036376953, 0.07043993377685547, 0.07029350280761719, 0.07095097351074219, 0.07063545227050781, 0.07042969512939454, 0.0700794906616211, 0.07004876708984376, 0.07055564880371094, 0.07064268493652344, 0.0701470718383789, 0.07053823852539062, 0.07053209686279296, 0.07260671997070313, 0.071804931640625, 0.07340850830078124, 0.07225446319580078, 0.07109529876708984, 0.07053926086425781, 0.0705269775390625, 0.07047885131835938, 0.07037849426269531, 0.07057817840576172, 0.07064985656738282, 0.07054950714111329, 0.07037337493896484, 0.0700426254272461, 0.07076044464111328, 0.07062937927246093, 0.07047270202636718, 0.07053215789794921, 0.07040300750732421, 0.07008665466308593, 0.1408890838623047, 0.0700579833984375, 0.07040306854248046, 0.07095603179931641, 0.07067135620117188, 0.07052902221679687, 0.07032217407226563, 0.07087411499023437, 0.07056179046630859, 0.07040306854248046, 0.07119155120849609, 0.07045222473144531, 0.07153561401367188, 0.07102361297607422, 0.07295692443847657, 0.07263641357421875, 0.07663104248046874, 0.0731514892578125, 0.07261190032958985, 0.07177107238769531, 0.07096627044677735, 0.07251455688476563, 0.07052288055419922, 0.07034982299804687, 0.07089356994628906, 0.07131443023681641, 0.07168000030517578, 0.07194931030273438, 0.07047577667236328, 0.07285247802734375, 0.07253094482421875, 0.07324364471435547, 0.07126322937011718, 0.0699504623413086, 0.0695050277709961, 0.06952652740478515, 0.0703641586303711, 0.07017676544189454, 0.06994944000244141, 0.06934425354003906, 0.0704686050415039, 0.0703969955444336, 
0.07023200225830079, 0.07009894561767578, 0.0696596450805664, 0.07089459228515625, 0.07034674835205078, 0.0717158432006836, 0.07167692565917969, 0.07193702697753906, 0.07162572479248047, 0.07148748779296875, 0.07270706939697266, 0.07029759979248047, 0.0700979232788086, 0.07177833557128906, 0.07124784088134765, 0.07156326293945313, 0.0718397445678711, 0.07128985595703125, 0.07057920074462891, 0.07054438018798828, 0.07034880065917969, 0.14348287963867187, 0.07049830627441406, 0.07024639892578124, 0.0706170883178711, 0.06970982360839843, 0.07032524871826172, 0.07015833282470703, 0.07006208038330078, 0.07034371185302735, 0.07042147064208984, 0.0702003173828125, 0.07272038269042969, 0.07259852600097656, 0.07238041687011719, 0.07269478607177735, 0.07254118347167969, 0.07344640350341797, 0.07283404541015626, 0.0721981430053711, 0.07240908813476563, 0.07258624267578125, 0.07021670532226562, 0.07021670532226562, 0.07076249694824219, 0.07023308563232422, 0.07016960144042969, 0.0699504623413086, 0.07017472076416016, 0.07204351806640626, 0.07038873291015625, 0.07032012939453125, 0.07037439727783203, 0.07026687622070313, 0.07033139038085938, 0.0702525405883789, 0.07038259124755859, 0.07035801696777344, 0.0702894058227539, 0.07015731048583984, 0.07010304260253906, 0.0701470718383789, 0.07023312377929687, 0.07039177703857422, 0.07029657745361328, 0.0702371826171875, 0.07127552032470703, 0.07026278686523438, 0.07011532592773438, 0.07021670532226562, 0.0706324462890625, 0.07047885131835938, 0.07055052947998047, 0.07046451568603515, 0.07022284698486328, 0.07026892852783204, 0.07008460998535156, 0.07233433532714843, 0.07216537475585938, 0.07277977752685547, 0.07074610900878907, 0.06977126312255859, 0.06972006225585937, 0.06992998504638671, 0.14085635375976563, 0.06998320007324219, 0.070329345703125, 0.07024127960205079, 0.07040921783447265, 0.06970572662353515, 0.07047065734863281, 0.07047577667236328, 0.07034880065917969, 0.07042047882080078, 0.07067340850830078, 0.07035801696777344, 0.07065087890625, 0.07035494232177734, 0.07040921783447265, 0.07037542724609375, 0.07243059539794922, 0.07194419097900391, 0.07237017822265625, 0.070329345703125, 0.07091404724121093, 0.07100927734375, 0.07039794921875, 0.07029964447021485, 0.07054438018798828, 0.07038976287841797, 0.07050137329101562, 0.07172505950927735, 0.073301025390625, 0.07376380920410157, 0.0728616943359375, 0.07057202911376953, 0.07004160308837891, 0.06957158660888672, 0.07051570892333985, 0.07044198608398437, 0.07060582733154297, 0.0704716796875, 0.07048806762695313, 0.07044403076171875, 0.07057510375976563, 0.07046041870117188, 0.07126732635498047, 0.07257907104492188, 0.07077581024169922, 0.07048601531982422, 0.07047987365722656, 0.0702730255126953, 0.06994739532470703, 0.07239577484130859, 0.07052902221679687, 0.07033036804199219, 0.07229440307617188, 0.07042969512939454, 0.07047782135009766, 0.07042156982421875, 0.07127855682373047, 0.07062627410888672, 0.07017164611816407, 0.0716042251586914, 0.07137177276611328, 0.07010201263427734, 0.07049420928955077, 0.14187315368652345, 0.07062322998046874, 0.07053721618652344, 0.07057612609863281, 0.07067750549316407, 0.070793212890625, 0.07038566589355469, 0.07079936218261719, 0.0702730255126953, 0.070614013671875, 0.07046553802490234, 0.0705116195678711, 0.07048397064208985, 0.07051673889160157, 0.0705423355102539, 0.07029043579101563, 0.07032319641113281, 0.0703078384399414, 0.07160012817382813, 0.07250431823730469, 0.07043788909912109, 0.07060384368896484, 0.07045728302001954, 0.07036825561523438, 
0.07142400360107422, 0.07265177917480468, 0.07246028900146484, 0.07253606414794922, 0.07204659271240234, 0.07295078277587891, 0.0729139175415039, 0.07195340728759765, 0.072416259765625, 0.07247154998779297, 0.07184690856933594, 0.07263231658935547, 0.07274086761474609, 0.0717096939086914, 0.07236402893066406, 0.07166976165771484, 0.07194931030273438, 0.07173426818847656, 0.07251148986816407, 0.07147315216064454, 0.07262515258789062, 0.0719482879638672, 0.07177523040771484, 0.07152435302734375, 0.0726824951171875, 0.07250125122070313, 0.07197081756591797, 0.07249203491210937, 0.07244595336914063, 0.07192063903808593, 0.07164313507080078, 0.07215821075439453, 0.07262723541259766, 0.071847900390625, 0.07213568115234376, 0.07099801635742188, 0.07239577484130859, 0.07256371307373047, 0.07234559631347656]",tokens/s,13.906298307203762,,,main,False,False,
 
  hf_raise_for_status(response)
  File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/utils/_errors.py"", line 367, in hf_raise_for_status
  raise HfHubHTTPError(message, response=response) from e
+ huggingface_hub.utils._errors.HfHubHTTPError: (Request ID: Root=1-667a1485-5bda482b1becebf26e06eed6;daecc226-2804-4d1c-99eb-728b569575b9)
 
  403 Forbidden: Please enable access to public gated repositories in your fine-grained token settings to view this repository..
  Cannot access content at: https://huggingface.co/google/recurrentgemma-2b/resolve/main/config.json.
 
6118
  cls._check_and_enable_flash_attn_2(
6119
  File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 1560, in _check_and_enable_flash_attn_2
6120
  raise ValueError(
6121
+ ValueError: CodeGenForCausalLM does not support Flash Attention 2.0 yet. Please request to add support where the model is hosted, on its model hub page: https://huggingface.co//tmp/tmpf_wrkr98/no_weights_model/discussions/new or in the Transformers GitHub repo: https://github.com/huggingface/transformers/issues/new
6122
 
6123
  ",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,
6124
  8bit-bnb-flash_attention_2,pytorch,2.3.1+cu121,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,google/recurrentgemma-7b,google/recurrentgemma-7b,cuda,0,42,,,True,True,True,True,,float16,True,False,,flash_attention_2,,False,,False,forward,bnb,0.0,True,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, AMD EPYC 7R32,16,66697.29792,Linux,x86_64,Linux-5.10.218-208.862.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['NVIDIA A10G'],1,24146608128,0.2.1,,4.41.2,,0.31.0,,,,1.20.0,,,,0.11.1,,"Traceback (most recent call last):
 
6160
  hf_raise_for_status(response)
6161
  File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/utils/_errors.py"", line 352, in hf_raise_for_status
6162
  raise RepositoryNotFoundError(message, response) from e
6163
+ huggingface_hub.utils._errors.RepositoryNotFoundError: 404 Client Error. (Request ID: Root=1-667a14ad-7764cd5e3083b8573945a4bf;46c746c8-d734-4eae-98d9-50e76407ba6b)
6164
 
6165
  Repository Not Found for url: https://huggingface.co/google/recurrentgemma-7b/resolve/main/config.json.
6166
  Please make sure you specified the correct `repo_id` and `repo_type`.
 
6221
  hf_raise_for_status(response)
6222
  File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/utils/_errors.py"", line 367, in hf_raise_for_status
6223
  raise HfHubHTTPError(message, response=response) from e
6224
+ huggingface_hub.utils._errors.HfHubHTTPError: (Request ID: Root=1-667a142a-73bb2d150e2cb01f2afc904f;86dcacd6-b3e6-4392-96fe-1ac878567c6a)
6225
 
6226
  403 Forbidden: Please enable access to public gated repositories in your fine-grained token settings to view this repository..
6227
  Cannot access content at: https://huggingface.co/google/gemma-2b/resolve/main/config.json.
 
6369
  hf_raise_for_status(response)
6370
  File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/utils/_errors.py"", line 367, in hf_raise_for_status
6371
  raise HfHubHTTPError(message, response=response) from e
6372
+ huggingface_hub.utils._errors.HfHubHTTPError: (Request ID: Root=1-667a1456-675ebc755000167e43d8c147;60601b45-fe61-473b-b09d-c1ebb3c21986)
6373
 
6374
  403 Forbidden: Please enable access to public gated repositories in your fine-grained token settings to view this repository..
6375
  Cannot access content at: https://huggingface.co/google/gemma-7b/resolve/main/config.json.
 
6627
  cls._check_and_enable_flash_attn_2(
6628
  File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 1560, in _check_and_enable_flash_attn_2
6629
  raise ValueError(
6630
+ ValueError: XGLMForCausalLM does not support Flash Attention 2.0 yet. Please request to add support where the model is hosted, on its model hub page: https://huggingface.co//tmp/tmpx99s_9ee/no_weights_model/discussions/new or in the Transformers GitHub repo: https://github.com/huggingface/transformers/issues/new
6631
 
6632
  ",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,
6633
  8bit-bnb-flash_attention_2,pytorch,2.3.1+cu121,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,x,x,cuda,0,42,,,True,True,True,True,,float16,True,False,,flash_attention_2,,False,,False,forward,bnb,0.0,True,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, AMD EPYC 7R32,16,66697.29792,Linux,x86_64,Linux-5.10.218-208.862.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['NVIDIA A10G'],1,24146608128,0.2.1,,4.41.2,,0.31.0,,,,1.20.0,,,,0.11.1,,"Traceback (most recent call last):
 
6919
  hf_raise_for_status(response)
6920
  File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/utils/_errors.py"", line 367, in hf_raise_for_status
6921
  raise HfHubHTTPError(message, response=response) from e
6922
+ huggingface_hub.utils._errors.HfHubHTTPError: (Request ID: Root=1-667a11ed-52a1b57c5328906a351e4ea2;57edafe3-571d-4df7-9e06-b4d9a3f0bbfd)
6923
 
6924
  403 Forbidden: Please enable access to public gated repositories in your fine-grained token settings to view this repository..
6925
  Cannot access content at: https://huggingface.co/databricks/dbrx-base/resolve/main/config.json.
 
7125
  cls._check_and_enable_flash_attn_2(
7126
  File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 1560, in _check_and_enable_flash_attn_2
7127
  raise ValueError(
7128
+ ValueError: XGLMForCausalLM does not support Flash Attention 2.0 yet. Please request to add support where the model is hosted, on its model hub page: https://huggingface.co//tmp/tmpkmxovvz8/no_weights_model/discussions/new or in the Transformers GitHub repo: https://github.com/huggingface/transformers/issues/new
7129
 
7130
  ",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,
7131
  8bit-bnb-flash_attention_2,pytorch,2.3.0+cu121,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,TencentARC/Mistral_Pro_8B_v0.1,,cuda,0,42,,,,,True,True,,float16,True,False,,flash_attention_2,,False,,False,forward,bnb,0.0,True,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,,spawn, AMD EPYC 7R32,16,66697.29792,Linux,x86_64,Linux-5.10.215-203.850.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['NVIDIA A10G'],1,24146608128,0.2.0,,4.40.2,,0.30.1,,,,1.19.2,,,,0.10.0,,,MB,5660.131328,10962.337792,0.0,10307.50208,10029.210624,s,10,1.4342690734863281,0.1434269073486328,0.0032239528002932865,0.14276050567626952,0.14646627197265624,0.14882899169921876,0.15071916748046876,"[0.15119171142578125, 0.14055494689941406, 0.14345762634277343, 0.14594122314453126, 0.14206338500976562, 0.1407220458984375, 0.14052482604980468, 0.14054188537597656, 0.1442948760986328, 0.14497654724121095]",tokens/s,1784.8812662308324,kWh,1.7037357295316364e-06,9.333370224703686e-07,4.414690039685912e-06,7.051762791687916e-06,tokens/kWh,36302979.490710236,MB,5660.131328,10962.337792,0.0,10307.50208,10056.90112,s,10,91.03206542968749,9.10320654296875,0.08025953626004148,9.07454150390625,9.2241923828125,9.259001464843749,9.28684873046875,"[9.293810546875, 9.0487578125, 9.21645703125, 9.09006640625, 9.034521484375, 9.0707138671875, 9.078369140625, 9.0585595703125, 9.098578125, 9.0422314453125]",tokens/s,6.920638316029503,kWh,0.00010834198840080747,5.937985369364469e-05,0.00020726067374491383,0.000374982515839366,tokens/kWh,168007.83326918574,,s,629,92.16478802490228,0.1465258951111325,0.017154203671883243,0.14422015380859374,0.14755471496582032,0.14824775695800782,0.2843094567871094,"[0.1534976043701172, 0.15138217163085937, 0.14815318298339844, 0.14723379516601562, 0.14589552307128906, 0.14708316040039063, 0.14719488525390625, 0.1486684112548828, 0.14777650451660157, 0.14928997802734376, 0.14852505493164062, 0.14738124084472656, 0.14825267028808595, 0.14617190551757814, 0.14766905212402343, 0.14603974914550782, 0.14723788452148437, 0.14720204162597655, 0.14758604431152345, 0.14771302795410157, 0.14740480041503906, 0.14766592407226561, 0.1476065216064453, 0.14713037109375, 0.14739353942871095, 0.14730035400390626, 0.1475030975341797, 0.1471815643310547, 0.1472184295654297, 0.14708837890625, 0.147631103515625, 0.14701676940917968, 0.1473258819580078, 0.14693174743652343, 0.14660508728027344, 0.147368896484375, 0.14727372741699218, 0.14718360900878907, 0.14627839660644532, 0.14710476684570312, 0.14769258117675782, 0.14700233459472656, 0.1472604217529297, 0.14715289306640625, 0.14720204162597655, 0.14623641967773438, 0.14750617980957031, 0.14715493774414062, 0.1470310363769531, 0.147125244140625, 0.1474846649169922, 0.14677708435058595, 0.1466603546142578, 0.14490419006347657, 0.14707200622558594, 0.14715187072753907, 0.14748672485351563, 0.14915379333496093, 0.14982861328125, 0.14755635070800782, 0.14706585693359375, 0.14647807312011718, 0.28261068725585936, 0.14193667602539062, 0.14197244262695313, 0.1418618927001953, 0.14242816162109376, 0.14205644226074218, 0.1417164764404297, 0.14202674865722656, 0.1422878723144531, 0.14224485778808593, 0.14182809448242187, 0.1425244140625, 0.144990234375, 0.14621385192871095, 0.14743449401855468, 0.14801100158691408, 0.14670028686523437, 0.14654258728027345, 0.14298725891113281, 0.14222029113769533, 
0.14218547058105468, 0.14629273986816407, 0.14658047485351564, 0.14634701538085937, 0.14226022338867186, 0.14339993286132813, 0.14181272888183594, 0.14232882690429688, 0.14208409118652343, 0.14561077880859374, 0.14698086547851563, 0.1468139190673828, 0.1429698486328125, 0.14215065002441407, 0.14218547058105468, 0.141549560546875, 0.14222950744628907, 0.14219468688964843, 0.14192127990722656, 0.1424701385498047, 0.14214451599121095, 0.14214247131347657, 0.142055419921875, 0.14200729370117188, 0.1421486053466797, 0.14182298278808594, 0.14230117797851563, 0.14190899658203124, 0.14127513122558594, 0.14705255126953126, 0.14671974182128905, 0.1458841552734375, 0.1467658233642578, 0.14730137634277343, 0.14554623413085938, 0.14709759521484375, 0.14683135986328125, 0.14590156555175782, 0.1421158447265625, 0.14215884399414064, 0.14210560607910155, 0.1419694061279297, 0.14311935424804687, 0.2923089904785156, 0.14832540893554688, 0.14229808044433595, 0.14185369873046874, 0.14219161987304688, 0.14211276245117188, 0.1411440887451172, 0.14231138610839844, 0.14515507507324218, 0.14639616394042967, 0.14236569213867187, 0.14185267639160157, 0.14324223327636718, 0.14543565368652345, 0.1462415313720703, 0.14700749206542968, 0.14749183654785156, 0.14913433837890624, 0.14708224487304689, 0.14785842895507811, 0.14836531066894532, 0.14659686279296874, 0.14642585754394533, 0.14595379638671874, 0.1468037109375, 0.14741299438476563, 0.1471866912841797, 0.14695018005371094, 0.14615753173828125, 0.14739865112304687, 0.14753074645996095, 0.14726451110839844, 0.14712832641601561, 0.14760858154296874, 0.14934323120117188, 0.14819839477539062, 0.14772531127929686, 0.14653030395507813, 0.14631321716308593, 0.1442170867919922, 0.1412689971923828, 0.14300569152832032, 0.1467852783203125, 0.144468994140625, 0.1477908477783203, 0.14747853088378907, 0.14710578918457032, 0.14732902526855468, 0.1479761962890625, 0.14744166564941405, 0.14737202453613282, 0.14715904235839844, 0.14731980895996094, 0.14735565185546876, 0.14734335327148437, 0.14759117126464844, 0.14715699768066406, 0.14793318176269532, 0.1475758056640625, 0.1477191619873047, 0.14755430603027345, 0.14633676147460936, 0.1478974151611328, 0.293482421875, 0.14644940185546876, 0.1467545623779297, 0.14730239868164063, 0.14824038696289063, 0.14709564208984374, 0.1482085418701172, 0.14197247314453126, 0.14193356323242187, 0.14270669555664062, 0.14446080017089843, 0.1439488067626953, 0.14200114440917969, 0.141955078125, 0.14212515258789063, 0.14191094970703125, 0.14166220092773438, 0.14212095642089845, 0.1453096923828125, 0.14467481994628906, 0.141955078125, 0.14581964111328125, 0.1451612091064453, 0.14201548767089844, 0.14746214294433593, 0.14255410766601562, 0.14240460205078126, 0.1456005096435547, 0.14647500610351563, 0.14677606201171875, 0.14548991394042968, 0.1466112060546875, 0.14662661743164063, 0.14529632568359374, 0.14660301208496093, 0.14532095336914064, 0.14720204162597655, 0.14515609741210939, 0.14182818603515626, 0.14359234619140626, 0.14211993408203125, 0.14597222900390625, 0.14219981384277344, 0.14482534790039062, 0.1466050567626953, 0.1439139862060547, 0.14232984924316405, 0.14218138122558593, 0.14511410522460938, 0.1470054473876953, 0.14371839904785155, 0.14676480102539063, 0.14239027404785157, 0.1433210906982422, 0.14554112243652345, 0.14293913269042968, 0.14217727661132812, 0.1420052490234375, 0.1420738525390625, 0.14203392028808592, 0.1424547882080078, 0.14198886108398437, 0.1421096954345703, 0.28633599853515623, 0.1421793212890625, 0.1419683837890625, 
0.14203494262695313, 0.14164070129394532, 0.142129150390625, 0.1420369873046875, 0.1409792022705078, 0.14163763427734374, 0.14192434692382813, 0.14748876953125, 0.1480816650390625, 0.14249778747558595, 0.14207693481445313, 0.1418946533203125, 0.14191513061523436, 0.14302822875976562, 0.1462671356201172, 0.14379315185546876, 0.1420042266845703, 0.14189056396484376, 0.142166015625, 0.14207283020019532, 0.14194586181640625, 0.1419632568359375, 0.1417574462890625, 0.14206259155273437, 0.14469427490234374, 0.14457958984375, 0.14223770141601563, 0.14819123840332032, 0.14243942260742187, 0.14197760009765625, 0.14243327331542968, 0.14391500854492187, 0.14455091857910157, 0.1463655090332031, 0.1469173126220703, 0.14727372741699218, 0.14673715209960939, 0.14708428955078126, 0.14422015380859374, 0.14662864685058594, 0.14663778686523438, 0.14674227905273438, 0.14578790283203125, 0.14431129455566405, 0.14236671447753907, 0.14213632202148438, 0.14202879333496093, 0.14202983093261717, 0.14231961059570314, 0.144500732421875, 0.1421271057128906, 0.1421967315673828, 0.14232269287109375, 0.1416243133544922, 0.14191513061523436, 0.14222438049316405, 0.14256434631347656, 0.14218240356445314, 0.14231039428710937, 0.14231858825683594, 0.28481637573242186, 0.14673817443847656, 0.1422530517578125, 0.1425244140625, 0.1424435272216797, 0.14243942260742187, 0.141549560546875, 0.14195712280273437, 0.14179942321777345, 0.14219264221191405, 0.14188543701171874, 0.14629682922363282, 0.14243536376953125, 0.14196937561035156, 0.1419141082763672, 0.14319308471679687, 0.14707200622558594, 0.14835813903808595, 0.146914306640625, 0.14225202941894532, 0.1429432373046875, 0.1466439666748047, 0.1463582763671875, 0.14676173400878906, 0.142097412109375, 0.14210456848144531, 0.142328857421875, 0.14207894897460938, 0.14204415893554687, 0.14187519836425783, 0.14248448181152343, 0.1420748748779297, 0.14453657531738281, 0.1497241668701172, 0.14483045959472657, 0.14850559997558593, 0.1470443572998047, 0.1467535400390625, 0.14210560607910155, 0.14510182189941406, 0.1472430114746094, 0.14219366455078125, 0.14655795288085938, 0.14226329040527344, 0.14511616516113282, 0.1430425567626953, 0.14588108825683593, 0.14455807495117187, 0.14212095642089845, 0.14284185791015624, 0.1465180206298828, 0.14465434265136717, 0.14633779907226563, 0.14715391540527345, 0.14545919799804688, 0.14233804321289062, 0.14329347229003905, 0.14232981872558595, 0.14286131286621093, 0.14224076843261718, 0.14246502685546875, 0.14232473754882813, 0.14228172302246095, 0.28571749877929686, 0.14685081481933593, 0.1466060791015625, 0.14657945251464843, 0.14648013305664062, 0.1426606140136719, 0.142166015625, 0.1418383331298828, 0.14211891174316407, 0.14176870727539062, 0.1424547882080078, 0.14432870483398438, 0.1421844482421875, 0.14212300109863282, 0.14295245361328124, 0.14228480529785156, 0.14206668090820312, 0.14567730712890625, 0.14654360961914062, 0.1468538818359375, 0.14368153381347656, 0.14674943542480468, 0.14677920532226563, 0.14910252380371095, 0.1466337890625, 0.14349510192871093, 0.1429053497314453, 0.1412884521484375, 0.14192025756835938, 0.14216397094726563, 0.1419171905517578, 0.14165298461914064, 0.14216294860839843, 0.14221107482910156, 0.1421271057128906, 0.14187930297851561, 0.14223359680175782, 0.14176153564453126, 0.14771302795410157, 0.1455615997314453, 0.1417820129394531, 0.1478830108642578, 0.146044921875, 0.14544895935058594, 0.14200320434570313, 0.1432238006591797, 0.14634495544433593, 0.14659584045410157, 0.14687437438964843, 0.1460561981201172, 
0.14661734008789062, 0.14204722595214844, 0.1441239013671875, 0.14674227905273438, 0.14656819152832032, 0.1423207092285156, 0.14195193481445312, 0.1423093719482422, 0.14223155212402344, 0.14204928588867188, 0.14666957092285157, 0.14673715209960939, 0.1421107177734375, 0.2830059509277344, 0.14187110900878908, 0.14194586181640625, 0.1410232391357422, 0.1422387237548828, 0.142055419921875, 0.14226739501953126, 0.14214656066894532, 0.14211482238769532, 0.14605413818359375, 0.1423134765625, 0.14269541931152344, 0.14528614807128906, 0.14245785522460938, 0.14232167053222655, 0.1421558074951172, 0.14252642822265624, 0.14217010498046875, 0.1435924530029297, 0.14213938903808593, 0.14155264282226562, 0.14155264282226562, 0.14195097351074218, 0.14192947387695312, 0.14090956115722655, 0.14435122680664061, 0.1428019561767578, 0.14582473754882813, 0.14670745849609376, 0.14971084594726564, 0.14662042236328124, 0.14527590942382812, 0.14401536560058595, 0.14318284606933593, 0.14638182067871094, 0.14626200866699218, 0.14467277526855468, 0.1420185546875, 0.14180870056152345, 0.14723167419433594, 0.14519500732421875, 0.14663987731933595, 0.1462353973388672, 0.14677606201171875, 0.14902784729003907, 0.1424701385498047, 0.1466265869140625, 0.1469142761230469, 0.144500732421875, 0.14280601501464843, 0.1422776336669922, 0.14262168884277343, 0.14193458557128907, 0.1422878723144531, 0.1456906280517578, 0.14675865173339844, 0.14654360961914062, 0.14573875427246094, 0.14238514709472655, 0.14148812866210939, 0.14229196166992186, 0.1426042938232422, 0.14217625427246094, 0.2911856689453125, 0.14181683349609375, 0.14218751525878906, 0.14203904724121094, 0.14148403930664064, 0.14213529968261718, 0.14200729370117188, 0.14192250061035155, 0.14223033142089844, 0.14233088684082032, 0.1445386199951172, 0.14171449279785156, 0.1443634490966797, 0.1467105255126953, 0.14244557189941406, 0.143383544921875, 0.1467310028076172, 0.1423687744140625, 0.1447403564453125, 0.14611251831054686, 0.1460643768310547, 0.14269541931152344, 0.14153523254394532, 0.1422387237548828, 0.14567526245117188, 0.14662451171875, 0.14528204345703125, 0.14394776916503907, 0.14590975952148438, 0.1455422058105469, 0.14265849304199218, 0.1465630645751953, 0.15092428588867188, 0.14452120971679688, 0.147051513671875, 0.14275379943847658, 0.14206771850585936, 0.14204620361328124, 0.14201449584960937, 0.14219261169433595, 0.14223974609375, 0.14218853759765626, 0.142202880859375, 0.1419990997314453, 0.14330368041992186, 0.14642994689941408, 0.1418434600830078, 0.1423319091796875, 0.1424486389160156, 0.1481861114501953, 0.145069091796875, 0.14216802978515625, 0.1488527374267578, 0.14834072875976562, 0.14806016540527345, 0.1470699462890625, 0.14825881958007814, 0.1478973388671875, 0.1470586853027344, 0.1465149383544922, 0.1468651580810547, 0.14660096740722656, 0.14620057678222656, 0.28747674560546876, 0.14185987854003906, 0.14227145385742188, 0.14205952453613283, 0.14222848510742186, 0.14239846801757813, 0.14227865600585937, 0.14170317077636718, 0.14202879333496093, 0.14202163696289063, 0.14184652709960938, 0.14195814514160157, 0.14193463134765624, 0.1420113983154297, 0.14294326782226563, 0.14217619323730468, 0.14186393737792968, 0.14214041137695313, 0.1420779571533203, 0.14214247131347657, 0.14200831604003905, 0.14226943969726563, 0.14271385192871094, 0.14213020324707032, 0.14204412841796876, 0.14196633911132814, 0.141591552734375, 0.1420789794921875, 0.14242713928222656, 0.14199501037597656, 0.1419878387451172, 0.1420779571533203, 0.14217625427246094, 
0.14183628845214843, 0.14210867309570313, 0.1430630340576172, 0.1449400329589844, 0.14324327087402344, 0.1468098602294922, 0.14377165222167967, 0.1458401336669922, 0.14713446044921874, 0.14582681274414064, 0.14665216064453124, 0.1467658233642578, 0.14667059326171875, 0.14654669189453126, 0.14684979248046875, 0.14678425598144532, 0.14661325073242187, 0.14607052612304688, 0.14693376159667967, 0.1446860809326172, 0.14237184143066406, 0.1426483154296875, 0.14714572143554688, 0.14626815795898437, 0.14899200439453125, 0.14655795288085938, 0.14257254028320313, 0.14196223449707032, 0.14228688049316407, 0.14241789245605468]",tokens/s,6.824732237544431,,,main,False,False,
 
7189
  cls._check_and_enable_flash_attn_2(
7190
  File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 1560, in _check_and_enable_flash_attn_2
7191
  raise ValueError(
7192
+ ValueError: XGLMForCausalLM does not support Flash Attention 2.0 yet. Please request to add support where the model is hosted, on its model hub page: https://huggingface.co//tmp/tmpr40amp3b/no_weights_model/discussions/new or in the Transformers GitHub repo: https://github.com/huggingface/transformers/issues/new
7193
 
7194
  ",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,True
7195
  4bit-bnb-flash_attention_2,pytorch,2.3.1+cu121,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,tiiuae/falcon-7b,tiiuae/falcon-7b,cuda,0,42,,,True,True,True,True,,float16,True,False,,flash_attention_2,,False,,False,forward,bnb,0.0,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, AMD EPYC 7R32,16,66697.29792,Linux,x86_64,Linux-5.10.218-208.862.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['NVIDIA A10G'],1,24146608128,0.2.1,,4.41.2,,0.31.0,,,,1.20.0,,,,0.11.1,,"Traceback (most recent call last):
 
7970
  cls._check_and_enable_flash_attn_2(
7971
  File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 1560, in _check_and_enable_flash_attn_2
7972
  raise ValueError(
7973
+ ValueError: CodeGenForCausalLM does not support Flash Attention 2.0 yet. Please request to add support where the model is hosted, on its model hub page: https://huggingface.co//tmp/tmptfhqxa5h/no_weights_model/discussions/new or in the Transformers GitHub repo: https://github.com/huggingface/transformers/issues/new
7974
 
7975
  ",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,True
7976
  4bit-bnb-flash_attention_2,pytorch,2.3.0+cu121,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,EleutherAI/pythia-2.7b,,cuda,0,42,,,,,True,True,,float16,True,False,,flash_attention_2,,False,,False,forward,bnb,0.0,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,,spawn, AMD EPYC 7R32,16,66697.29792,Linux,x86_64,Linux-5.10.215-203.850.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['NVIDIA A10G'],1,24146608128,0.2.0,217063f5c507ed7cc255df7e1f64c4333a0b4dfe,4.40.2,,0.30.1,,,,1.19.2,,,,0.10.0,,,MB,1278.603264,3276.275712,0.0,2629.828608,2358.099968,s,10,1.363180648803711,0.1363180648803711,0.00026217317487357416,0.1361774597167969,0.13673182220458985,0.1368010154724121,0.13685637008666993,"[0.13687020874023437, 0.1363632354736328, 0.13641459655761717, 0.13614486694335937, 0.1360360565185547, 0.1361745300292969, 0.13614434814453125, 0.13671644592285156, 0.1361803894042969, 0.13613597106933595]",tokens/s,1877.9609307442738,kWh,1.6118631997444966e-06,8.832266388659214e-07,9.71787188840562e-06,1.2212961727016039e-05,tokens/kWh,20961336.465478946,MB,1278.603264,3276.275712,0.0,2629.828608,2358.102528,s,10,29.777363037109374,2.9777363037109374,0.02588726324207309,2.9713291015625,3.018982763671875,3.0193787109375,3.0196954687499997,"[2.9692578125, 2.954369873046875, 2.95029638671875, 2.94651025390625, 2.959674560546875, 3.018894775390625, 2.99993896484375, 2.973400390625, 3.019774658203125, 2.985245361328125]",tokens/s,21.157011089762264,kWh,3.511612323535098e-05,1.924535570923163e-05,7.12104706319921e-05,0.00012557194957657467,tokens/kWh,501704.4030329572,,s,629,30.95603612136841,0.04921468381775582,0.016312179544641713,0.046707710266113284,0.04863692855834961,0.04895887298583985,0.1833139434814453,"[0.04888371276855469, 0.04917145538330078, 0.048249855041503906, 0.04791910552978516, 0.04700057601928711, 0.04656435012817383, 0.04804095840454101, 0.048145408630371096, 0.05266739273071289, 0.052951038360595705, 0.04698521423339844, 0.04662681579589844, 0.04637593460083008, 0.04695449447631836, 0.046437374114990236, 0.04646912002563477, 0.046312446594238284, 0.046415870666503906, 0.046268417358398435, 0.04664115142822266, 0.04859187316894531, 0.048347137451171876, 0.048274433135986325, 0.048266239166259765, 0.04822531127929688, 0.046888927459716796, 0.04660531234741211, 0.046668800354003906, 0.0465428466796875, 0.04667289733886719, 0.04662374496459961, 0.046584831237792966, 0.046617633819580076, 0.04658787155151367, 0.04661043167114258, 0.0464793586730957, 0.04622335815429687, 0.046317569732666014, 0.04641593551635742, 0.046725055694580075, 0.04673023986816406, 0.04653977584838867, 0.046630912780761716, 0.046773246765136715, 0.04663603210449219, 0.046601215362548826, 0.04661043167114258, 0.04664422225952149, 0.0466063346862793, 0.04671692657470703, 0.04665651321411133, 0.04674764633178711, 0.046693374633789066, 0.04663808059692383, 0.04658892822265625, 0.0465797119140625, 0.04658278274536133, 0.046614528656005856, 0.04653158569335938, 0.0465797119140625, 0.04661964797973633, 0.046682113647460936, 0.18365440368652344, 0.046698497772216796, 0.046683135986328124, 0.04674764633178711, 0.04665350341796875, 0.046623680114746095, 0.04651724624633789, 0.04652339172363281, 0.0466165771484375, 0.046535678863525394, 0.04666572952270508, 0.046543872833251954, 0.04715827178955078, 0.049702945709228515, 0.04864508819580078, 
0.04698214340209961, 0.04639231872558594, 0.04759756851196289, 0.04860313415527344, 0.04817203140258789, 0.04827340698242188, 0.04671078491210937, 0.04645171356201172, 0.04654489517211914, 0.04678246307373047, 0.046707710266113284, 0.04657766342163086, 0.04651827239990235, 0.04720640182495117, 0.04652032089233398, 0.04651007843017578, 0.04648038482666016, 0.04660019302368164, 0.04666572952270508, 0.04597760009765625, 0.045873153686523435, 0.046465023040771485, 0.04595916748046875, 0.04644659042358398, 0.04680294418334961, 0.0465428466796875, 0.04660639953613281, 0.04682745742797852, 0.04664115142822266, 0.046595073699951174, 0.04654694366455078, 0.04647731018066406, 0.04648857498168945, 0.046524417877197265, 0.046465023040771485, 0.049186817169189455, 0.04894003295898437, 0.047738910675048825, 0.04649980926513672, 0.04653366470336914, 0.046507999420166014, 0.048075775146484374, 0.04660326385498047, 0.04653158569335938, 0.04651827239990235, 0.04651212692260742, 0.046837760925292966, 0.04677119827270508, 0.1835816955566406, 0.04658380889892578, 0.047800319671630856, 0.046117889404296876, 0.046486526489257815, 0.046491649627685545, 0.046429183959960936, 0.04612607955932617, 0.046516223907470705, 0.04642201614379883, 0.0464640007019043, 0.04640972900390625, 0.047303680419921876, 0.04697292709350586, 0.046301185607910154, 0.04644454574584961, 0.04625612640380859, 0.046429183959960936, 0.046516223907470705, 0.04658790588378906, 0.046541854858398436, 0.046540767669677734, 0.04672512054443359, 0.0474337272644043, 0.0465428466796875, 0.04739686584472656, 0.0466165771484375, 0.0465797119140625, 0.046473217010498044, 0.046413822174072264, 0.04655001449584961, 0.04657766342163086, 0.046514175415039063, 0.04658796691894531, 0.04752275085449219, 0.04734975814819336, 0.04658892822265625, 0.04655411148071289, 0.04851200103759765, 0.04827852630615234, 0.0474224624633789, 0.04655001449584961, 0.046489601135253904, 0.046475265502929686, 0.048115711212158206, 0.04731289672851562, 0.046516223907470705, 0.04650188827514649, 0.049426433563232425, 0.04711423873901367, 0.04655104064941406, 0.046489601135253904, 0.04655615997314453, 0.04638924789428711, 0.046486526489257815, 0.04640563201904297, 0.04651212692260742, 0.04775321578979492, 0.04838092803955078, 0.04723814392089844, 0.04639539337158203, 0.04654694366455078, 0.04658790588378906, 0.18326527404785156, 0.04662681579589844, 0.04649369430541992, 0.046584831237792966, 0.04655513763427734, 0.04655923080444336, 0.04653875350952148, 0.04658995056152344, 0.04652851104736328, 0.04656639862060547, 0.04646297454833984, 0.04653363037109375, 0.04658995056152344, 0.046565376281738284, 0.0465530891418457, 0.04670361709594727, 0.046663681030273435, 0.04671897506713867, 0.04660019302368164, 0.04658073425292969, 0.046650367736816405, 0.04658687973022461, 0.04647731018066406, 0.046604286193847655, 0.04630326461791992, 0.046703582763671876, 0.05004185485839844, 0.046838783264160154, 0.04659609603881836, 0.04656435012817383, 0.046630912780761716, 0.046698558807373045, 0.04662172698974609, 0.04658371353149414, 0.0465797119140625, 0.04662579345703125, 0.047288318634033204, 0.048198654174804685, 0.04846694564819336, 0.04801331329345703, 0.04657664108276367, 0.046884864807128904, 0.048734302520751956, 0.046059425354003904, 0.04659616088867188, 0.04658988952636719, 0.04660636901855469, 0.04648137664794922, 0.04628275299072265, 0.046652416229248046, 0.046268417358398435, 0.046714881896972656, 0.04660838317871094, 0.04664934539794922, 0.04652544021606445, 0.04658585739135742, 
0.04658995056152344, 0.04665651321411133, 0.0466063346862793, 0.046688255310058595, 0.046693374633789066, 0.046680065155029295, 0.04660224151611328, 0.18300210571289063, 0.04653670501708984, 0.046473217010498044, 0.0461578254699707, 0.046496768951416016, 0.04647423934936523, 0.04640256118774414, 0.04640665435791016, 0.04638003158569336, 0.046358528137207033, 0.04634316635131836, 0.046322689056396485, 0.04616806411743164, 0.046317569732666014, 0.046516223907470705, 0.04638412857055664, 0.04650495910644531, 0.04644454574584961, 0.046458881378173826, 0.04602982330322265, 0.04636876678466797, 0.046475265502929686, 0.046412799835205076, 0.046519294738769534, 0.046527488708496094, 0.04621311950683594, 0.046467071533203126, 0.04649369430541992, 0.04691763305664062, 0.04690124893188476, 0.04652544021606445, 0.046150657653808595, 0.04600428771972656, 0.04650182342529297, 0.04653055953979492, 0.04657766342163086, 0.04649267196655273, 0.04682963180541992, 0.046623680114746095, 0.04650495910644531, 0.04646809768676758, 0.046461952209472655, 0.047339519500732424, 0.048694271087646485, 0.04841471862792969, 0.04851200103759765, 0.04836249542236328, 0.047900672912597655, 0.04835948944091797, 0.048065471649169925, 0.04837171173095703, 0.04834201431274414, 0.04732108688354492, 0.047524864196777344, 0.0466063346862793, 0.04696985626220703, 0.047857662200927735, 0.048399360656738284, 0.048811008453369144, 0.047870975494384765, 0.04825497436523438, 0.048231422424316404, 0.04836454391479492, 0.18703053283691407, 0.04863692855834961, 0.048642047882080076, 0.04825190353393555, 0.048363521575927736, 0.04759347152709961, 0.048571392059326174, 0.048143360137939455, 0.04746547317504883, 0.04823654556274414, 0.048881664276123046, 0.048694271087646485, 0.04863692855834961, 0.04817715072631836, 0.04658790588378906, 0.04653055953979492, 0.04757913589477539, 0.0470302734375, 0.04621004867553711, 0.04739891052246094, 0.04717567825317383, 0.047252479553222655, 0.04823040008544922, 0.04765497589111328, 0.04656838226318359, 0.04729753494262695, 0.04845977783203125, 0.04829695892333984, 0.04857958221435547, 0.048396289825439455, 0.048353279113769534, 0.04774604797363281, 0.04651724624633789, 0.0465428466796875, 0.04867177581787109, 0.04863484954833985, 0.046604286193847655, 0.047421440124511716, 0.047592449188232425, 0.046993408203125, 0.04728115081787109, 0.0483164176940918, 0.04786175918579102, 0.04827238464355469, 0.04739788818359375, 0.04658892822265625, 0.047234046936035154, 0.04739891052246094, 0.046611457824707034, 0.04801126480102539, 0.048333824157714846, 0.04657664108276367, 0.04751564788818359, 0.0487014389038086, 0.048292865753173826, 0.04835635375976562, 0.04862259292602539, 0.048143360137939455, 0.049186817169189455, 0.04864614486694336, 0.04837887954711914, 0.04873011016845703, 0.05090409469604492, 0.1834936065673828, 0.04677836990356445, 0.046693374633789066, 0.046698497772216796, 0.046824447631835936, 0.04659302520751953, 0.04657561492919922, 0.046635009765625, 0.04619468688964844, 0.04654694366455078, 0.048099327087402347, 0.04747673416137695, 0.04760268783569336, 0.04588032150268555, 0.04759347152709961, 0.048570369720458986, 0.048827392578125, 0.047435840606689456, 0.047363006591796875, 0.047933441162109375, 0.04711936187744141, 0.04856524658203125, 0.047963134765625, 0.04738457489013672, 0.04847923278808594, 0.0484771842956543, 0.04849356842041016, 0.04822220611572266, 0.047645694732666014, 0.04873932647705078, 0.04708966445922851, 0.04839321517944336, 0.048563201904296874, 0.04717977523803711, 
0.046927871704101565, 0.046620670318603515, 0.046639102935791016, 0.04680089569091797, 0.047659008026123044, 0.047280128479003904, 0.05217792129516602, 0.04737740707397461, 0.0483133430480957, 0.04905779266357422, 0.04962508773803711, 0.04874444961547852, 0.048589824676513675, 0.04659814453125, 0.04643532943725586, 0.04657664108276367, 0.04866457748413086, 0.04680908966064453, 0.04842291259765625, 0.047442943572998046, 0.04648448181152344, 0.04641689682006836, 0.047323135375976565, 0.048643070220947264, 0.04859187316894531, 0.0487823371887207, 0.047777793884277345, 0.04702310562133789, 0.04630220794677734, 0.1877821502685547, 0.04894412612915039, 0.04664115142822266, 0.04641177749633789, 0.04643430328369141, 0.04662579345703125, 0.046532608032226565, 0.04658790588378906, 0.04613836669921875, 0.046475265502929686, 0.046382080078125, 0.046450687408447267, 0.04670873641967774, 0.04652134323120117, 0.046268417358398435, 0.045902847290039066, 0.046025726318359376, 0.04748185729980469, 0.04684902572631836, 0.046461952209472655, 0.047372287750244144, 0.04662169647216797, 0.04873625564575195, 0.04875161743164062, 0.04831545639038086, 0.04846995162963867, 0.046698497772216796, 0.04655411148071289, 0.04661248016357422, 0.04654079818725586, 0.04655001449584961, 0.046561279296875, 0.04655411148071289, 0.04647423934936523, 0.04652339172363281, 0.0464824333190918, 0.04624998474121094, 0.04660940933227539, 0.04664524841308594, 0.04669747161865234, 0.04654489517211914, 0.04839321517944336, 0.04714393615722656, 0.046601215362548826, 0.048126976013183595, 0.048192512512207034, 0.046617599487304685, 0.04764876937866211, 0.04862464141845703, 0.048363521575927736, 0.04870355224609375, 0.04846380615234375, 0.048282623291015625, 0.04703539276123047, 0.047862785339355465, 0.047541248321533204, 0.04650393676757812, 0.046617599487304685, 0.046676990509033206, 0.048495616912841794, 0.04905779266357422, 0.04838915252685547, 0.046614494323730465, 0.18333287048339844, 0.04656025695800781, 0.046601215362548826, 0.04660838317871094, 0.04676095962524414, 0.04911718368530273, 0.04858879852294922, 0.04851923370361328, 0.049417152404785156, 0.04852633666992188, 0.048494590759277346, 0.04827033615112305, 0.050447425842285155, 0.04946220779418945, 0.04804710388183594, 0.0470384635925293, 0.048968704223632815, 0.0485928955078125, 0.047710208892822265, 0.04654079818725586, 0.04716032028198242, 0.04854988861083984, 0.048535552978515625, 0.04889395141601562, 0.048568321228027345, 0.04860927963256836, 0.048935935974121096, 0.04853964614868164, 0.04840755081176758, 0.04842905426025391, 0.04652134323120117, 0.04660326385498047, 0.047483905792236325, 0.04876697540283203, 0.04867993545532227, 0.04855295944213867, 0.048557056427001956, 0.04691046524047852, 0.04793552017211914, 0.04787401580810547, 0.04848332977294922, 0.04837273788452148, 0.048336894989013675, 0.048418815612792966, 0.04837376022338867, 0.047164417266845705, 0.04696780776977539, 0.04809318542480469, 0.04653875350952148, 0.046362686157226565, 0.04724319839477539, 0.04828672027587891, 0.04692582321166992, 0.04737945556640625, 0.048061439514160156, 0.04743270492553711, 0.047702014923095705, 0.048449569702148435, 0.04834406280517578, 0.048519134521484375, 0.04654182434082031, 0.04656639862060547, 0.047287296295166016, 0.18399539184570313, 0.046808063507080076, 0.04854272079467774, 0.048263168334960936, 0.04669852828979492, 0.04928508758544922, 0.046868480682373044, 0.046581760406494144, 0.046752769470214846, 0.05028966522216797, 0.048942081451416014, 0.0466247673034668, 
0.04674150466918945, 0.046622718811035156, 0.046601215362548826, 0.04654489517211914, 0.04664012908935547, 0.04781158447265625, 0.04701696014404297, 0.04669235229492188, 0.04635340881347656, 0.04624076843261719, 0.0465797119140625, 0.04681216049194336, 0.04678041458129883, 0.04680908966064453, 0.048568321228027345, 0.04837887954711914, 0.048671745300292966, 0.04843622589111328, 0.04868096160888672, 0.04844339370727539, 0.04698316955566406, 0.04852121734619141, 0.046637054443359374, 0.04786380767822265, 0.04649683380126953, 0.04812998580932617, 0.0482979850769043, 0.046993408203125, 0.04710297775268555, 0.04897587203979492, 0.04673535919189453, 0.046611457824707034, 0.046875648498535157, 0.046709758758544925, 0.04660940933227539, 0.046548992156982424, 0.04666777420043945, 0.046676990509033206, 0.046685184478759766, 0.04659609603881836, 0.046706687927246096, 0.04922675323486328, 0.04920729446411133, 0.04852019119262695, 0.04854272079467774, 0.048086017608642576, 0.046620670318603515, 0.0466063346862793, 0.046553150177001956, 0.046623680114746095, 0.04688281631469727]",tokens/s,20.319138972893636,,,main,False,False,True
 
8274
  hf_raise_for_status(response)
8275
  File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/utils/_errors.py"", line 367, in hf_raise_for_status
8276
  raise HfHubHTTPError(message, response=response) from e
8277
+ huggingface_hub.utils._errors.HfHubHTTPError: (Request ID: Root=1-667a147c-28f2f8281101ce6307d72082;71753a12-e6db-43fd-ac89-baa4f2857d40)
8278
 
8279
  403 Forbidden: Please enable access to public gated repositories in your fine-grained token settings to view this repository..
8280
  Cannot access content at: https://huggingface.co/google/recurrentgemma-2b/resolve/main/config.json.
 
8427
  cls._check_and_enable_flash_attn_2(
8428
  File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 1560, in _check_and_enable_flash_attn_2
8429
  raise ValueError(
8430
+ ValueError: CodeGenForCausalLM does not support Flash Attention 2.0 yet. Please request to add support where the model is hosted, on its model hub page: https://huggingface.co//tmp/tmpizgvshnd/no_weights_model/discussions/new or in the Transformers GitHub repo: https://github.com/huggingface/transformers/issues/new
8431
 
8432
  ",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,True
8433
  4bit-bnb-flash_attention_2,pytorch,2.3.1+cu121,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,google/recurrentgemma-7b,google/recurrentgemma-7b,cuda,0,42,,,True,True,True,True,,float16,True,False,,flash_attention_2,,False,,False,forward,bnb,0.0,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, AMD EPYC 7R32,16,66697.29792,Linux,x86_64,Linux-5.10.218-208.862.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['NVIDIA A10G'],1,24146608128,0.2.1,,4.41.2,,0.31.0,,,,1.20.0,,,,0.11.1,,"Traceback (most recent call last):
 
8469
  hf_raise_for_status(response)
8470
  File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/utils/_errors.py"", line 352, in hf_raise_for_status
8471
  raise RepositoryNotFoundError(message, response) from e
8472
+ huggingface_hub.utils._errors.RepositoryNotFoundError: 404 Client Error. (Request ID: Root=1-667a14a7-3ed4d3b372956296714dcfbd;0917ebf3-68c7-4b75-823a-b360ecb627d4)
8473
 
8474
  Repository Not Found for url: https://huggingface.co/google/recurrentgemma-7b/resolve/main/config.json.
8475
  Please make sure you specified the correct `repo_id` and `repo_type`.
 
8530
  hf_raise_for_status(response)
8531
  File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/utils/_errors.py"", line 367, in hf_raise_for_status
8532
  raise HfHubHTTPError(message, response=response) from e
8533
+ huggingface_hub.utils._errors.HfHubHTTPError: (Request ID: Root=1-667a1423-3a05a6ed01fd4fa737d4fe18;0a2e8f1f-9481-4297-9efd-689ab50f0e80)
8534
 
8535
  403 Forbidden: Please enable access to public gated repositories in your fine-grained token settings to view this repository..
8536
  Cannot access content at: https://huggingface.co/google/gemma-2b/resolve/main/config.json.
 
8708
  hf_raise_for_status(response)
8709
  File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/utils/_errors.py"", line 367, in hf_raise_for_status
8710
  raise HfHubHTTPError(message, response=response) from e
8711
+ huggingface_hub.utils._errors.HfHubHTTPError: (Request ID: Root=1-667a144f-39c6357d71d932434951e621;7b2878e1-fe10-430a-b2ab-4e18482f18c1)
8712
 
8713
  403 Forbidden: Please enable access to public gated repositories in your fine-grained token settings to view this repository..
8714
  Cannot access content at: https://huggingface.co/google/gemma-7b/resolve/main/config.json.
 
8958
  cls._check_and_enable_flash_attn_2(
8959
  File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 1560, in _check_and_enable_flash_attn_2
8960
  raise ValueError(
8961
+ ValueError: XGLMForCausalLM does not support Flash Attention 2.0 yet. Please request to add support where the model is hosted, on its model hub page: https://huggingface.co//tmp/tmpltr74um5/no_weights_model/discussions/new or in the Transformers GitHub repo: https://github.com/huggingface/transformers/issues/new
8962
 
8963
  ",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,True
8964
  4bit-bnb-flash_attention_2,pytorch,2.3.1+cu121,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,x,x,cuda,0,42,,,True,True,True,True,,float16,True,False,,flash_attention_2,,False,,False,forward,bnb,0.0,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, AMD EPYC 7R32,16,66697.29792,Linux,x86_64,Linux-5.10.218-208.862.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['NVIDIA A10G'],1,24146608128,0.2.1,,4.41.2,,0.31.0,,,,1.20.0,,,,0.11.1,,"Traceback (most recent call last):
 
9280
  hf_raise_for_status(response)
9281
  File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/utils/_errors.py"", line 367, in hf_raise_for_status
9282
  raise HfHubHTTPError(message, response=response) from e
9283
+ huggingface_hub.utils._errors.HfHubHTTPError: (Request ID: Root=1-667a11e6-6aa9e69e39179e873a99ce8b;a4db5d84-1913-4eba-bec1-6011eef99989)
9284
 
9285
  403 Forbidden: Please enable access to public gated repositories in your fine-grained token settings to view this repository..
9286
  Cannot access content at: https://huggingface.co/databricks/dbrx-base/resolve/main/config.json.
 
9470
  cls._check_and_enable_flash_attn_2(
9471
  File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 1560, in _check_and_enable_flash_attn_2
9472
  raise ValueError(
9473
+ ValueError: XGLMForCausalLM does not support Flash Attention 2.0 yet. Please request to add support where the model is hosted, on its model hub page: https://huggingface.co//tmp/tmpemm2s4ow/no_weights_model/discussions/new or in the Transformers GitHub repo: https://github.com/huggingface/transformers/issues/new
9474
 
9475
  ",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,True
9476
  4bit-bnb-flash_attention_2,pytorch,2.3.0+cu121,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,TencentARC/Mistral_Pro_8B_v0.1,,cuda,0,42,,,,,True,True,,float16,True,False,,flash_attention_2,,False,,False,forward,bnb,0.0,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,,spawn, AMD EPYC 7R32,16,66697.29792,Linux,x86_64,Linux-5.10.215-203.850.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['NVIDIA A10G'],1,24146608128,0.2.0,,4.40.2,,0.30.1,,,,1.19.2,,,,0.10.0,,,MB,2088.595456,7149.715456,0.0,6503.268352,6130.0736,s,10,4.354886688232422,0.43548866882324216,0.0009500705262691482,0.43506301879882814,0.4360664276123047,0.43712642974853516,0.4379744314575195,"[0.43818643188476564, 0.4357357177734375, 0.43495596313476564, 0.43503363037109377, 0.43495343017578125, 0.4350522766113281, 0.43583087158203127, 0.43494912719726564, 0.4350737609863281, 0.435115478515625]",tokens/s,587.8453753842819,kWh,5.1466148040720816e-06,2.82011353963595e-06,3.132101539495727e-05,3.92877437386653e-05,tokens/kWh,6516027.026211126,MB,2088.595456,7149.715456,0.0,6503.268352,6130.07616,s,10,52.50473486328125,5.250473486328125,0.09201662696971703,5.295069091796875,5.3397248046875,5.347057958984375,5.352924482421875,"[5.33809521484375, 5.3243525390625, 5.32192578125, 5.2103232421875, 5.2842451171875, 5.35439111328125, 5.30589306640625, 5.11702490234375, 5.11619873046875, 5.13228515625]",tokens/s,11.998917843133139,kWh,6.21844898056293e-05,3.408169959179007e-05,0.00014644368237224062,0.00024270987176965998,tokens/kWh,259569.1701398498,,s,629,56.34009081268308,0.08957089159409079,0.05164629418121464,0.08460291290283203,0.08540467224121094,0.08579256439208983,0.5164155053710938,"[0.0808826904296875, 0.08587264251708984, 0.08512409973144532, 0.08462745666503907, 0.083346435546875, 0.08475443267822266, 0.08069427490234375, 0.08058879852294921, 0.08235929870605468, 0.08576921844482421, 0.08510054779052735, 0.0855224609375, 0.08536675262451172, 0.08540262603759766, 0.085212158203125, 0.08498175811767578, 0.08550911712646485, 0.08472882843017578, 0.0831272964477539, 0.08487731170654297, 0.08553369903564453, 0.08621772766113281, 0.08491315460205077, 0.08398233795166016, 0.08506777954101563, 0.08603961944580078, 0.08499603271484375, 0.0849438705444336, 0.08485478210449218, 0.08516095733642579, 0.08507596588134765, 0.08500531005859376, 0.08534015655517578, 0.08566169738769532, 0.0850708465576172, 0.08518041229248047, 0.08493363189697266, 0.08562995147705078, 0.08498790740966797, 0.08498585510253906, 0.08526950073242187, 0.08520499420166015, 0.08501145935058593, 0.08523878479003906, 0.082661376953125, 0.0845854721069336, 0.08474726104736328, 0.0852326431274414, 0.08525414276123047, 0.08547122955322266, 0.08509030151367188, 0.08530841827392578, 0.08540364837646484, 0.08494489288330079, 0.08497561645507813, 0.08481587219238282, 0.08475341033935548, 0.08502579498291016, 0.08509542083740235, 0.08502067565917969, 0.08512102508544922, 0.08473705291748047, 0.5177200317382813, 0.08092364501953125, 0.08514765167236328, 0.0842577896118164, 0.08333106994628907, 0.08667545318603516, 0.08635289764404297, 0.08515583801269531, 0.08509849548339844, 0.0846909408569336, 0.08494908905029297, 0.08473385620117188, 0.08456703948974609, 0.08469811248779296, 0.08491417694091796, 0.08470425415039062, 0.080932861328125, 0.08101580810546875, 0.08081203460693359, 
0.08047824096679687, 0.08428540802001953, 0.0845998077392578, 0.08174591827392579, 0.08479027557373046, 0.08491417694091796, 0.08499625396728516, 0.08498262023925782, 0.08467250823974609, 0.08499609375, 0.0854128646850586, 0.08483328247070313, 0.08498381042480468, 0.08468889617919922, 0.08487423706054688, 0.08468582153320313, 0.0848015365600586, 0.0852674560546875, 0.08585830688476563, 0.08500326538085938, 0.08540876770019531, 0.08543743896484375, 0.08685164642333984, 0.08631084442138671, 0.08498073577880859, 0.08495001220703124, 0.08483430480957031, 0.08489369964599609, 0.08494694519042968, 0.08446463775634766, 0.08407654571533203, 0.08470425415039062, 0.08453324890136718, 0.08203366088867188, 0.08531148529052734, 0.08482201385498046, 0.08499097442626953, 0.08538829040527343, 0.08473804473876953, 0.08488960266113281, 0.08508415985107422, 0.08515789031982422, 0.08494287872314453, 0.08479126739501953, 0.5212528686523438, 0.084890625, 0.08508108520507812, 0.08611840057373046, 0.08516403198242188, 0.08458956909179688, 0.08463667297363281, 0.08557878112792969, 0.08558902740478516, 0.08462841796875, 0.08482508850097656, 0.08524390411376953, 0.0847267837524414, 0.08469811248779296, 0.08470425415039062, 0.08479539489746094, 0.08464895629882813, 0.08452607727050782, 0.08485379028320313, 0.08477590179443359, 0.08533299255371093, 0.08490598297119141, 0.0849090576171875, 0.08484352111816407, 0.08475955200195312, 0.080753662109375, 0.08075059509277344, 0.08288256072998047, 0.08464486694335938, 0.08470015716552734, 0.08492134094238281, 0.08503501129150391, 0.08485887908935547, 0.08471449279785156, 0.084569091796875, 0.08482099151611328, 0.08460594940185547, 0.08432537841796875, 0.08461209869384766, 0.0847984619140625, 0.0846192626953125, 0.08486605072021484, 0.08466534423828125, 0.0845301742553711, 0.08466636657714843, 0.08366899108886719, 0.08472576141357421, 0.08461824035644532, 0.08455577850341797, 0.08461619567871094, 0.08433561706542969, 0.0844554214477539, 0.08434893035888671, 0.08446463775634766, 0.08080691528320312, 0.08115507507324218, 0.08446771240234376, 0.08471449279785156, 0.084748291015625, 0.08502272033691406, 0.08421692657470703, 0.08310678100585937, 0.08451372528076172, 0.5164462280273437, 0.08081817626953125, 0.08075878143310547, 0.08023859405517578, 0.08031231689453125, 0.0796651840209961, 0.08062461090087891, 0.08074342346191406, 0.08057036590576172, 0.08049049377441406, 0.08030207824707031, 0.08085913848876954, 0.08074854278564453, 0.08060313415527344, 0.0807567367553711, 0.08064617919921875, 0.08057341003417968, 0.0805775375366211, 0.08061542510986328, 0.08045158386230469, 0.08406835174560547, 0.08073932647705079, 0.08241868591308593, 0.08514765167236328, 0.08534220886230469, 0.08399565124511718, 0.08037580871582031, 0.08123699188232422, 0.08072294616699219, 0.08092572784423828, 0.08314777374267578, 0.08090415954589844, 0.08274329376220703, 0.08470527648925781, 0.0807874526977539, 0.08084786987304687, 0.08369868469238281, 0.08458444976806641, 0.08475750732421874, 0.08480563354492188, 0.08099635314941406, 0.08079974365234376, 0.08065535736083984, 0.08275865936279297, 0.08473603057861329, 0.08466326141357422, 0.0848384017944336, 0.08517120361328125, 0.08535552215576171, 0.08433663940429688, 0.08464588928222656, 0.08481587219238282, 0.08481484985351563, 0.08510975646972656, 0.08468275451660157, 0.0861143035888672, 0.08548454284667968, 0.08472064208984376, 0.08493055725097656, 0.08681574249267578, 0.08619725036621094, 0.08521932983398438, 0.08474726104736328, 0.5163919067382813, 
0.08359117126464843, 0.08464691162109375, 0.08525004577636719, 0.08071167755126953, 0.0807608642578125, 0.08082121276855468, 0.08076799774169922, 0.08060006713867188, 0.08175001525878907, 0.08488243103027343, 0.08472166442871094, 0.08394445037841797, 0.08496435546875, 0.0847267837524414, 0.08517120361328125, 0.08457526397705079, 0.08479126739501953, 0.08574156951904296, 0.08485990142822265, 0.08481382751464844, 0.08460291290283203, 0.08464790344238281, 0.08110489654541016, 0.08444927978515625, 0.08513843536376953, 0.08490290832519531, 0.08127078247070313, 0.08087654113769531, 0.08389324951171875, 0.0847083511352539, 0.08498381042480468, 0.08485068511962891, 0.08423526763916016, 0.08490918731689454, 0.08473484802246094, 0.08484044647216797, 0.08504319763183593, 0.0810618896484375, 0.0811878433227539, 0.08054271697998047, 0.0806123504638672, 0.0807034912109375, 0.08408370971679688, 0.08534937286376953, 0.08435711669921875, 0.08488448333740234, 0.08481177520751954, 0.08471552276611329, 0.08489574432373047, 0.08478822326660156, 0.0851937255859375, 0.08481692504882812, 0.08490595245361328, 0.08488140869140624, 0.08486809539794922, 0.08466534423828125, 0.08551526641845703, 0.0852490234375, 0.08489266967773437, 0.08475852966308593, 0.08525721740722657, 0.0845660171508789, 0.5165946655273438, 0.08096256256103515, 0.08067686462402343, 0.08383283233642579, 0.08545587158203125, 0.08592691040039062, 0.08542105865478515, 0.08580812835693359, 0.08653107452392578, 0.08479027557373046, 0.08508006286621093, 0.08506777954101563, 0.08509951782226563, 0.08516403198242188, 0.08509235382080078, 0.08480870056152344, 0.08497872161865234, 0.0852776641845703, 0.08520703887939453, 0.08543846130371094, 0.08479027557373046, 0.08502579498291016, 0.08532685089111328, 0.08513433837890624, 0.08513228607177735, 0.0849991683959961, 0.08475341033935548, 0.08500121307373047, 0.08525004577636719, 0.08603238677978516, 0.0857149429321289, 0.08504524993896484, 0.08503193664550782, 0.08063385772705078, 0.08257945251464843, 0.08591462707519532, 0.08676454162597656, 0.08414208221435547, 0.08625151824951172, 0.0863109130859375, 0.085501953125, 0.08678092956542968, 0.08597503662109375, 0.0852490234375, 0.0853780517578125, 0.08530944061279297, 0.08511385345458984, 0.08488550567626953, 0.0850882568359375, 0.08629043579101563, 0.08513536071777343, 0.08496640014648438, 0.08540364837646484, 0.08508108520507812, 0.0853729248046875, 0.08491827392578125, 0.08530226898193359, 0.08532994842529297, 0.08504521942138672, 0.0850882568359375, 0.08540160369873047, 0.08498381042480468, 0.08480668640136718, 0.5231185302734375, 0.08555007934570312, 0.085212158203125, 0.0849991683959961, 0.08495820617675781, 0.08518758392333985, 0.08489881896972656, 0.08519577789306641, 0.08499404907226563, 0.0853544921875, 0.08507087707519531, 0.08468988800048828, 0.0851773452758789, 0.08512409973144532, 0.08484146881103516, 0.08553472137451172, 0.08454454040527344, 0.08496841430664062, 0.08502374267578125, 0.08565964508056641, 0.08568627166748047, 0.0853903350830078, 0.0851230697631836, 0.0851251220703125, 0.0851600341796875, 0.08550185394287109, 0.08520089721679687, 0.08487014770507813, 0.08488038635253906, 0.08486502075195312, 0.08505958557128906, 0.08553062438964844, 0.08553369903564453, 0.08480973052978516, 0.08504524993896484, 0.08505241394042969, 0.08521318054199219, 0.08505753326416016, 0.08548044586181641, 0.08534835052490235, 0.08536576080322265, 0.08545996856689453, 0.08545996856689453, 0.08539647674560547, 0.08540057373046875, 0.08492031860351562, 
0.08551526641845703, 0.08133529663085938, 0.08056217956542969, 0.08036351776123046, 0.08066969299316407, 0.08025395202636719, 0.08039628601074218, 0.08287641906738281, 0.08132608032226563, 0.08082329559326172, 0.08066867065429688, 0.0822999038696289, 0.08375296020507812, 0.08239103698730468, 0.08026112365722657, 0.08034611511230469, 0.08190873718261718, 0.5161922607421875, 0.08048332977294922, 0.08032358551025391, 0.08309555053710938, 0.08317644500732421, 0.0805580825805664, 0.08210636901855468, 0.08081407928466797, 0.08045260620117188, 0.08023040008544922, 0.08245350646972656, 0.08249139404296875, 0.08060006713867188, 0.08049459075927734, 0.07992115020751953, 0.08049977874755859, 0.08055289459228515, 0.08052326202392578, 0.08077516937255859, 0.08043520355224609, 0.0833034210205078, 0.08294092559814453, 0.08160768127441406, 0.08223337554931641, 0.08221385955810546, 0.08159744262695312, 0.08116838073730469, 0.08049049377441406, 0.08033586883544921, 0.08034918212890625, 0.08060214233398437, 0.08028873443603515, 0.08058675384521484, 0.08057036590576172, 0.08030413055419922, 0.08308633422851562, 0.08312934112548828, 0.08227123260498047, 0.08059187316894531, 0.08236550140380859, 0.08107414245605468, 0.08041264343261718, 0.08030416107177735, 0.08019350433349609, 0.0805038070678711, 0.08295935821533203, 0.08059903717041016, 0.0819609603881836, 0.08271769714355469, 0.08190156555175782, 0.08018841552734375, 0.08054988861083984, 0.08026624298095703, 0.08369356536865234, 0.08052531433105468, 0.08051507568359376, 0.08030719757080078, 0.08242793273925782, 0.08046998596191406, 0.08084377288818359, 0.08229580688476562, 0.08067276763916016, 0.08043110656738281, 0.5164246826171875, 0.08162815856933593, 0.08057965087890626, 0.08042387390136718, 0.08143666839599609, 0.08144281768798828, 0.08223232269287109, 0.08032972717285156, 0.0812779541015625, 0.08238285064697265, 0.08033280181884765, 0.08039218902587891, 0.0803594207763672, 0.08050176239013672, 0.0800552978515625, 0.08041165161132813, 0.080468994140625, 0.08084172821044922, 0.08128614044189453, 0.08042393493652343, 0.08206028747558594, 0.08061952209472656, 0.08051507568359376, 0.0803420181274414, 0.08279654693603515, 0.08056729888916016, 0.08061548614501954, 0.080133056640625, 0.08056626892089844, 0.08235724639892578, 0.08163430023193359, 0.08038706970214844, 0.08184012603759766, 0.08037785339355469, 0.0805027847290039, 0.08062770843505859, 0.0814172134399414, 0.08300748443603516, 0.08400902557373047, 0.08350918579101563, 0.08451481628417969, 0.0817254409790039, 0.08179814147949219, 0.08327993774414062, 0.08242066955566406, 0.08332598114013672, 0.08047100830078124, 0.08046387481689453, 0.08042803192138671, 0.08188108825683593, 0.08300032043457031, 0.08036966705322265, 0.08046489715576172, 0.08080178833007813, 0.08087039947509765, 0.08029388427734375, 0.08035430145263672, 0.08065331268310547, 0.08069631958007813, 0.08063385772705078, 0.08074342346191406, 0.08058367919921874, 0.08041881561279297, 0.5169356689453125, 0.080574462890625, 0.08031948852539063, 0.08043929290771484, 0.08171826934814454, 0.0848189468383789, 0.08166194915771484, 0.08131072235107421, 0.08058777618408203, 0.08282316589355469, 0.08184524536132813, 0.08173056030273437, 0.08203263854980469, 0.08320511627197266, 0.08302694702148437, 0.08298188781738282, 0.08223846435546875, 0.08015769958496094, 0.08293170928955078, 0.08339968109130859, 0.08318873596191406, 0.08285183715820313, 0.08314470672607421, 0.0827658233642578, 0.08057651519775391, 0.08049561309814453, 0.08271158599853516, 
0.08238076782226562, 0.08045980834960938, 0.08164553833007812, 0.08335871887207032, 0.08263782501220703, 0.08058477020263671, 0.08046176147460937, 0.08078028869628906, 0.08137318420410156, 0.08054271697998047, 0.08029901123046874, 0.08063180541992188, 0.08216371154785156, 0.08048230743408204, 0.0804874267578125, 0.08042700958251953, 0.08028467559814453, 0.08053555297851563, 0.08054886627197265, 0.08183500671386719, 0.08184320068359376, 0.0805406723022461, 0.08033177947998046, 0.08072908782958985, 0.08007577514648437, 0.08035020446777344, 0.08032051086425782, 0.08077721405029296, 0.08069427490234375, 0.08040857696533203, 0.07999078369140625, 0.08025190734863281, 0.08065843200683594, 0.08293888092041016, 0.08307711791992188, 0.08205107116699219]",tokens/s,11.164341251974012,,,main,False,False,True
 
  hf_raise_for_status(response)
  File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/utils/_errors.py"", line 367, in hf_raise_for_status
  raise HfHubHTTPError(message, response=response) from e
+ huggingface_hub.utils._errors.HfHubHTTPError: (Request ID: Root=1-667a1475-18ffd84467cf43317125a57b;29e873f8-643e-4ccb-acd7-e588bc21af16)
 
  403 Forbidden: Please enable access to public gated repositories in your fine-grained token settings to view this repository..
  Cannot access content at: https://huggingface.co/google/recurrentgemma-2b/resolve/main/config.json.
 
  hf_raise_for_status(response)
  File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/utils/_errors.py"", line 352, in hf_raise_for_status
  raise RepositoryNotFoundError(message, response) from e
+ huggingface_hub.utils._errors.RepositoryNotFoundError: 404 Client Error. (Request ID: Root=1-667a14a0-497418973ae7848a2eaf9ad2;e9c0db56-7055-491c-9459-5ca0a67c31e1)
 
  Repository Not Found for url: https://huggingface.co/google/recurrentgemma-7b/resolve/main/config.json.
  Please make sure you specified the correct `repo_id` and `repo_type`.
 
  hf_raise_for_status(response)
  File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/utils/_errors.py"", line 367, in hf_raise_for_status
  raise HfHubHTTPError(message, response=response) from e
+ huggingface_hub.utils._errors.HfHubHTTPError: (Request ID: Root=1-667a141c-3f58184b3a2a11f6226d5c2c;b2ec6eec-00bd-43d1-b81e-70e76e68fcf0)
 
  403 Forbidden: Please enable access to public gated repositories in your fine-grained token settings to view this repository..
  Cannot access content at: https://huggingface.co/google/gemma-2b/resolve/main/config.json.
 
  hf_raise_for_status(response)
  File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/utils/_errors.py"", line 367, in hf_raise_for_status
  raise HfHubHTTPError(message, response=response) from e
+ huggingface_hub.utils._errors.HfHubHTTPError: (Request ID: Root=1-667a1448-58ebdded098043b83cc679be;19f17ad1-4d92-48f0-8b3a-a99310e580a8)
 
  403 Forbidden: Please enable access to public gated repositories in your fine-grained token settings to view this repository..
  Cannot access content at: https://huggingface.co/google/gemma-7b/resolve/main/config.json.
 
  hf_raise_for_status(response)
  File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/utils/_errors.py"", line 367, in hf_raise_for_status
  raise HfHubHTTPError(message, response=response) from e
+ huggingface_hub.utils._errors.HfHubHTTPError: (Request ID: Root=1-667a11e0-560eb506098661ba79a9fba9;802e6971-5407-4421-a4ab-5781759096ed)
 
  403 Forbidden: Please enable access to public gated repositories in your fine-grained token settings to view this repository..
  Cannot access content at: https://huggingface.co/databricks/dbrx-base/resolve/main/config.json.
 
  hf_raise_for_status(response)
  File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/utils/_errors.py"", line 367, in hf_raise_for_status
  raise HfHubHTTPError(message, response=response) from e
+ huggingface_hub.utils._errors.HfHubHTTPError: (Request ID: Root=1-667a145e-6e0895e16a95a5b86c691146;45713311-6360-46eb-a5c8-113be785f60a)
 
  403 Forbidden: Please enable access to public gated repositories in your fine-grained token settings to view this repository..
  Cannot access content at: https://huggingface.co/google/recurrentgemma-2b/resolve/main/config.json.
 
  hf_raise_for_status(response)
  File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/utils/_errors.py"", line 352, in hf_raise_for_status
  raise RepositoryNotFoundError(message, response) from e
+ huggingface_hub.utils._errors.RepositoryNotFoundError: 404 Client Error. (Request ID: Root=1-667a148c-17d78e836c69adb17f340647;a6738cb0-6eb0-4a12-82bf-33614206c1f6)
 
  Repository Not Found for url: https://huggingface.co/google/recurrentgemma-7b/resolve/main/config.json.
  Please make sure you specified the correct `repo_id` and `repo_type`.
 
  hf_raise_for_status(response)
  File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/utils/_errors.py"", line 367, in hf_raise_for_status
  raise HfHubHTTPError(message, response=response) from e
+ huggingface_hub.utils._errors.HfHubHTTPError: (Request ID: Root=1-667a1402-6578d57a437420da73de70f3;605aa99c-5115-4acb-b8bd-e39a69ddfb2c)
 
  403 Forbidden: Please enable access to public gated repositories in your fine-grained token settings to view this repository..
  Cannot access content at: https://huggingface.co/google/gemma-2b/resolve/main/config.json.
 
  hf_raise_for_status(response)
  File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/utils/_errors.py"", line 367, in hf_raise_for_status
  raise HfHubHTTPError(message, response=response) from e
+ huggingface_hub.utils._errors.HfHubHTTPError: (Request ID: Root=1-667a1433-660599cc454b416e16345607;140a84dc-93f4-4a1e-b024-d76f74de4cc7)
 
  403 Forbidden: Please enable access to public gated repositories in your fine-grained token settings to view this repository..
  Cannot access content at: https://huggingface.co/google/gemma-7b/resolve/main/config.json.
 
  hf_raise_for_status(response)
  File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/utils/_errors.py"", line 367, in hf_raise_for_status
  raise HfHubHTTPError(message, response=response) from e
+ huggingface_hub.utils._errors.HfHubHTTPError: (Request ID: Root=1-667a11cb-30f88d2752cba3822f98d14f;09265190-11c9-4daf-a54d-f959f5356f85)
 
  403 Forbidden: Please enable access to public gated repositories in your fine-grained token settings to view this repository..
  Cannot access content at: https://huggingface.co/databricks/dbrx-base/resolve/main/config.json.