IlyasMoutawwakil HF staff committed on
Commit
e753d46
·
verified ·
1 Parent(s): 8f43d4e

Upload cuda_inference_transformers_text-classification_FacebookAI/roberta-base/benchmark.json with huggingface_hub

Browse files
cuda_inference_transformers_text-classification_FacebookAI/roberta-base/benchmark.json CHANGED
@@ -3,7 +3,7 @@
3
  "name": "cuda_inference_transformers_text-classification_FacebookAI/roberta-base",
4
  "backend": {
5
  "name": "pytorch",
6
- "version": "2.2.0.dev20231010+rocm5.7",
7
  "_target_": "optimum_benchmark.backends.pytorch.backend.PyTorchBackend",
8
  "task": "text-classification",
9
  "library": "transformers",
@@ -103,7 +103,7 @@
103
  "optimum_commit": null,
104
  "timm_version": "1.0.9",
105
  "timm_commit": null,
106
- "peft_version": "0.12.0",
107
  "peft_commit": null
108
  }
109
  },
@@ -111,24 +111,24 @@
111
  "load": {
112
  "memory": {
113
  "unit": "MB",
114
- "max_ram": 956.70272,
115
  "max_global_vram": 68702.69952,
116
- "max_process_vram": 51799.769088,
117
  "max_reserved": 555.74528,
118
  "max_allocated": 499.374592
119
  },
120
  "latency": {
121
  "unit": "s",
122
  "count": 1,
123
- "total": 14.376361328125,
124
- "mean": 14.376361328125,
125
  "stdev": 0.0,
126
- "p50": 14.376361328125,
127
- "p90": 14.376361328125,
128
- "p95": 14.376361328125,
129
- "p99": 14.376361328125,
130
  "values": [
131
- 14.376361328125
132
  ]
133
  },
134
  "throughput": null,
@@ -138,157 +138,150 @@
138
  "forward": {
139
  "memory": {
140
  "unit": "MB",
141
- "max_ram": 1083.510784,
142
  "max_global_vram": 68702.69952,
143
- "max_process_vram": 250262.605824,
144
  "max_reserved": 555.74528,
145
  "max_allocated": 499.4432
146
  },
147
  "latency": {
148
  "unit": "s",
149
- "count": 129,
150
- "total": 1.0015675950050356,
151
- "mean": 0.0077640898837599654,
152
- "stdev": 0.0002461885232652437,
153
- "p50": 0.007728496074676514,
154
- "p90": 0.00803633689880371,
155
- "p95": 0.0081388973236084,
156
- "p99": 0.008609074745178222,
157
  "values": [
158
- 0.007872976779937745,
159
- 0.007894096851348877,
160
- 0.00810545825958252,
161
- 0.008127376556396485,
162
- 0.008183697700500488,
163
- 0.008060497283935547,
164
- 0.008027378082275391,
165
- 0.00800689697265625,
166
- 0.008245457649230957,
167
- 0.00788785696029663,
168
- 0.007896017074584961,
169
- 0.007847696781158446,
170
- 0.007828176975250244,
171
- 0.007693777084350586,
172
- 0.007692815780639649,
173
- 0.007690415859222412,
174
- 0.007688497066497803,
175
- 0.007675055980682373,
176
- 0.007761137008666992,
177
- 0.007706737041473389,
178
- 0.0076558561325073245,
179
- 0.00767937707901001,
180
- 0.007628496170043945,
181
- 0.007641295909881592,
182
- 0.007704017162322998,
183
- 0.007666575908660889,
184
- 0.0076672167778015134,
185
- 0.007648816108703613,
186
- 0.007648976802825927,
187
- 0.00777489709854126,
188
- 0.007728496074676514,
189
- 0.007770256996154785,
190
- 0.00774321699142456,
191
- 0.0077585768699646,
192
- 0.007719696044921875,
193
- 0.00894257926940918,
194
- 0.008348017692565919,
195
- 0.00795393705368042,
196
- 0.007996336936950683,
197
- 0.007941617965698242,
198
- 0.008038577079772949,
199
- 0.007974096775054932,
200
- 0.007903057098388672,
201
- 0.007867537021636962,
202
- 0.007766897201538086,
203
- 0.007764976978302002,
204
- 0.0078104171752929686,
205
- 0.00774273681640625,
206
- 0.007695695877075195,
207
- 0.007681937217712403,
208
- 0.007843697071075439,
209
- 0.007730096817016602,
210
- 0.007772975921630859,
211
- 0.00782449722290039,
212
- 0.007766417026519775,
213
- 0.007705296993255615,
214
- 0.007616176128387451,
215
- 0.007658417224884033,
216
- 0.0075800161361694336,
217
- 0.007597776889801025,
218
- 0.007607855796813965,
219
- 0.007596977233886719,
220
- 0.007783696174621582,
221
- 0.007943697929382324,
222
- 0.007936017036437988,
223
- 0.008009456634521484,
224
- 0.007941297054290771,
225
- 0.008146577835083008,
226
- 0.00782449722290039,
227
- 0.0077755370140075684,
228
- 0.007752975940704346,
229
- 0.007932016849517822,
230
- 0.007932656764984131,
231
- 0.00776241683959961,
232
- 0.0078038558959960935,
233
- 0.007739216804504395,
234
- 0.007670577049255371,
235
- 0.008036176681518554,
236
- 0.008412657737731933,
237
- 0.007782257080078125,
238
- 0.007890576839447022,
239
- 0.007868496894836426,
240
- 0.007896337032318115,
241
- 0.008036977767944336,
242
- 0.008013136863708496,
243
- 0.007996976852416992,
244
- 0.00868545913696289,
245
- 0.00792289686203003,
246
- 0.007643217086791992,
247
- 0.00772049617767334,
248
- 0.007613457202911377,
249
- 0.007574575901031494,
250
- 0.007592977046966553,
251
- 0.0075878558158874515,
252
- 0.007624337196350098,
253
- 0.007514256000518799,
254
- 0.007572336196899414,
255
- 0.0075236959457397464,
256
- 0.007522575855255127,
257
- 0.007496016025543213,
258
- 0.007508975982666016,
259
- 0.0074832158088684085,
260
- 0.007519536018371582,
261
- 0.007649456024169922,
262
- 0.007608976840972901,
263
- 0.007503695964813232,
264
- 0.007507376194000244,
265
- 0.007512656211853028,
266
- 0.007494895935058594,
267
- 0.007498415946960449,
268
- 0.007519696235656738,
269
- 0.0074430561065673825,
270
- 0.007533616065979004,
271
- 0.00752017593383789,
272
- 0.0074876961708068845,
273
- 0.007541296005249024,
274
- 0.007479537010192871,
275
- 0.0074832158088684085,
276
- 0.007259854793548584,
277
- 0.007119534969329834,
278
- 0.007463056087493897,
279
- 0.0075494561195373535,
280
- 0.007539697170257568,
281
- 0.0076985759735107425,
282
- 0.007750096797943115,
283
- 0.008111698150634766,
284
- 0.007689136028289795,
285
- 0.007701137065887451,
286
- 0.007851215839385986
287
  ]
288
  },
289
  "throughput": {
290
  "unit": "samples/s",
291
- "value": 128.7980967468815
292
  },
293
  "energy": null,
294
  "efficiency": null
 
3
  "name": "cuda_inference_transformers_text-classification_FacebookAI/roberta-base",
4
  "backend": {
5
  "name": "pytorch",
6
+ "version": "2.3.1+rocm5.7",
7
  "_target_": "optimum_benchmark.backends.pytorch.backend.PyTorchBackend",
8
  "task": "text-classification",
9
  "library": "transformers",
 
103
  "optimum_commit": null,
104
  "timm_version": "1.0.9",
105
  "timm_commit": null,
106
+ "peft_version": null,
107
  "peft_commit": null
108
  }
109
  },
 
111
  "load": {
112
  "memory": {
113
  "unit": "MB",
114
+ "max_ram": 1032.654848,
115
  "max_global_vram": 68702.69952,
116
+ "max_process_vram": 45683.843072,
117
  "max_reserved": 555.74528,
118
  "max_allocated": 499.374592
119
  },
120
  "latency": {
121
  "unit": "s",
122
  "count": 1,
123
+ "total": 14.200427734375,
124
+ "mean": 14.200427734375,
125
  "stdev": 0.0,
126
+ "p50": 14.200427734375,
127
+ "p90": 14.200427734375,
128
+ "p95": 14.200427734375,
129
+ "p99": 14.200427734375,
130
  "values": [
131
+ 14.200427734375
132
  ]
133
  },
134
  "throughput": null,
 
138
  "forward": {
139
  "memory": {
140
  "unit": "MB",
141
+ "max_ram": 1149.919232,
142
  "max_global_vram": 68702.69952,
143
+ "max_process_vram": 224219.160576,
144
  "max_reserved": 555.74528,
145
  "max_allocated": 499.4432
146
  },
147
  "latency": {
148
  "unit": "s",
149
+ "count": 122,
150
+ "total": 0.9953399045467377,
151
+ "mean": 0.008158523807760144,
152
+ "stdev": 0.0012453671496843836,
153
+ "p50": 0.008072498321533204,
154
+ "p90": 0.008253460216522217,
155
+ "p95": 0.008353706836700439,
156
+ "p99": 0.008441189279556274,
157
  "values": [
158
+ 0.008227218627929687,
159
+ 0.00814897918701172,
160
+ 0.008128337860107422,
161
+ 0.00809025764465332,
162
+ 0.008096818923950195,
163
+ 0.008100659370422363,
164
+ 0.008444179534912109,
165
+ 0.00820241928100586,
166
+ 0.008162259101867676,
167
+ 0.008123858451843262,
168
+ 0.00810561752319336,
169
+ 0.008086579322814941,
170
+ 0.008057458877563477,
171
+ 0.008059219360351562,
172
+ 0.008037458419799805,
173
+ 0.008047219276428222,
174
+ 0.00805953884124756,
175
+ 0.008024818420410156,
176
+ 0.008104819297790528,
177
+ 0.008076178550720214,
178
+ 0.00810689926147461,
179
+ 0.008057778358459472,
180
+ 0.008055538177490234,
181
+ 0.00804945945739746,
182
+ 0.008023219108581543,
183
+ 0.008051538467407226,
184
+ 0.008116498947143554,
185
+ 0.020581647872924803,
186
+ 0.0024056050777435303,
187
+ 0.007571218013763428,
188
+ 0.008111538887023926,
189
+ 0.008020498275756837,
190
+ 0.00807873821258545,
191
+ 0.008028658866882324,
192
+ 0.008018738746643066,
193
+ 0.008008498191833496,
194
+ 0.008020018577575684,
195
+ 0.008060978889465333,
196
+ 0.008082258224487304,
197
+ 0.008067858695983888,
198
+ 0.008116179466247559,
199
+ 0.008028338432312012,
200
+ 0.008070098876953124,
201
+ 0.008085779190063477,
202
+ 0.008057458877563477,
203
+ 0.008072978019714355,
204
+ 0.008061139106750489,
205
+ 0.00802721881866455,
206
+ 0.008122259140014649,
207
+ 0.008056017875671386,
208
+ 0.008065459251403808,
209
+ 0.008024179458618164,
210
+ 0.00804385757446289,
211
+ 0.008019858360290528,
212
+ 0.00807201862335205,
213
+ 0.00824225902557373,
214
+ 0.008373780250549316,
215
+ 0.008352338790893555,
216
+ 0.008333938598632813,
217
+ 0.008247380256652831,
218
+ 0.008218419075012208,
219
+ 0.008208019256591797,
220
+ 0.008250578880310059,
221
+ 0.008353778839111328,
222
+ 0.008391058921813965,
223
+ 0.008280818939208984,
224
+ 0.008257458686828614,
225
+ 0.008253780364990235,
226
+ 0.008161778450012208,
227
+ 0.00810817813873291,
228
+ 0.008107858657836914,
229
+ 0.0080648193359375,
230
+ 0.008102099418640136,
231
+ 0.008151538848876953,
232
+ 0.008078577995300294,
233
+ 0.008069138526916503,
234
+ 0.008074898719787598,
235
+ 0.008075058937072754,
236
+ 0.008064178466796875,
237
+ 0.00801137924194336,
238
+ 0.0080681791305542,
239
+ 0.008024337768554688,
240
+ 0.008025938987731934,
241
+ 0.008184979438781739,
242
+ 0.008429940223693849,
243
+ 0.008315379142761231,
244
+ 0.008136018753051758,
245
+ 0.008149298667907715,
246
+ 0.008106259346008301,
247
+ 0.008119378089904785,
248
+ 0.008045937538146973,
249
+ 0.008098258972167968,
250
+ 0.008043059349060059,
251
+ 0.008034897804260253,
252
+ 0.008110098838806153,
253
+ 0.008032979011535645,
254
+ 0.00806497859954834,
255
+ 0.008048337936401367,
256
+ 0.008052179336547852,
257
+ 0.008092179298400878,
258
+ 0.008042898178100586,
259
+ 0.008106738090515138,
260
+ 0.008353779792785645,
261
+ 0.008196019172668456,
262
+ 0.008046419143676757,
263
+ 0.007996017932891846,
264
+ 0.008087537765502929,
265
+ 0.008040658950805664,
266
+ 0.00805457878112793,
267
+ 0.00802209758758545,
268
+ 0.008050739288330078,
269
+ 0.008095059394836426,
270
+ 0.008087538719177246,
271
+ 0.008047698020935058,
272
+ 0.00805137825012207,
273
+ 0.008013298988342285,
274
+ 0.008023537635803223,
275
+ 0.008038257598876953,
276
+ 0.008039539337158202,
277
+ 0.008047698020935058,
278
+ 0.008026898384094238,
279
+ 0.008031859397888183
 
 
 
 
 
 
 
280
  ]
281
  },
282
  "throughput": {
283
  "unit": "samples/s",
284
+ "value": 122.57119346135018
285
  },
286
  "energy": null,
287
  "efficiency": null