IlyasMoutawwakil committed
Commit: 05e842c (verified)
Parent: 33294c4

Upload cuda_inference_transformers_text-classification_FacebookAI/roberta-base/benchmark.json with huggingface_hub

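The commit message says the file was uploaded with the huggingface_hub client. A minimal sketch of how such a commit could be produced is shown below; the repo_id, repo_type and local file path are hypothetical placeholders, not values taken from this commit.

# Sketch: push a single benchmark.json to a Hub repo with huggingface_hub.
# repo_id, repo_type and the local path are placeholders (assumptions).
from huggingface_hub import HfApi

api = HfApi()  # picks up the token from `huggingface-cli login` or HF_TOKEN
api.upload_file(
    path_or_fileobj="benchmark.json",  # local result file from the benchmark run
    path_in_repo="cuda_inference_transformers_text-classification_FacebookAI/roberta-base/benchmark.json",
    repo_id="<user-or-org>/<benchmark-repo>",  # placeholder
    repo_type="dataset",                       # assumption: results stored in a dataset repo
    commit_message="Upload benchmark.json with huggingface_hub",
)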
cuda_inference_transformers_text-classification_FacebookAI/roberta-base/benchmark.json CHANGED
@@ -3,7 +3,7 @@
  "name": "cuda_inference_transformers_text-classification_FacebookAI/roberta-base",
  "backend": {
  "name": "pytorch",
- "version": "2.3.1+rocm5.7",
+ "version": "2.4.0+rocm6.1",
  "_target_": "optimum_benchmark.backends.pytorch.backend.PyTorchBackend",
  "task": "text-classification",
  "library": "transformers",
@@ -111,24 +111,24 @@
  "load": {
  "memory": {
  "unit": "MB",
- "max_ram": 1032.974336,
- "max_global_vram": 843.251712,
- "max_process_vram": 48329.367552,
+ "max_ram": 1389.121536,
+ "max_global_vram": 12.857344,
+ "max_process_vram": 0.0,
  "max_reserved": 555.74528,
  "max_allocated": 499.374592
  },
  "latency": {
  "unit": "s",
  "count": 1,
- "total": 9.0151884765625,
- "mean": 9.0151884765625,
+ "total": 10.75170703125,
+ "mean": 10.75170703125,
  "stdev": 0.0,
- "p50": 9.0151884765625,
- "p90": 9.0151884765625,
- "p95": 9.0151884765625,
- "p99": 9.0151884765625,
+ "p50": 10.75170703125,
+ "p90": 10.75170703125,
+ "p95": 10.75170703125,
+ "p99": 10.75170703125,
  "values": [
- 9.0151884765625
+ 10.75170703125
  ]
  },
  "throughput": null,
@@ -138,153 +138,144 @@
  "forward": {
  "memory": {
  "unit": "MB",
- "max_ram": 1151.393792,
- "max_global_vram": 903.299072,
- "max_process_vram": 243481.837568,
+ "max_ram": 1518.350336,
+ "max_global_vram": 12.96384,
+ "max_process_vram": 0.0,
  "max_reserved": 555.74528,
  "max_allocated": 499.4432
  },
  "latency": {
  "unit": "s",
- "count": 125,
- "total": 0.9961886627674099,
- "mean": 0.007969509302139284,
- "stdev": 0.007743944964142123,
- "p50": 0.007831494808197021,
- "p90": 0.008019397926330567,
- "p95": 0.008041574478149415,
- "p99": 0.008154610786437989,
+ "count": 116,
+ "total": 0.9980110979080201,
+ "mean": 0.00860354394748293,
+ "stdev": 0.00012974770529353111,
+ "p50": 0.008562687397003173,
+ "p90": 0.008769486427307129,
+ "p95": 0.008832646369934083,
+ "p99": 0.009039926385879516,
  "values": [
- 0.008008455276489258,
- 0.008075814247131347,
- 0.008017574310302734,
- 0.007979175090789796,
- 0.008028134346008301,
- 0.007969415187835694,
- 0.00817949390411377,
- 0.007851974964141846,
- 0.007816615104675293,
- 0.007785574913024902,
- 0.007766695022583008,
- 0.007797094821929931,
- 0.007811975002288818,
- 0.007773255825042725,
- 0.007819655895233155,
- 0.007773736000061035,
- 0.007773574829101563,
- 0.0077743749618530275,
- 0.007753415107727051,
- 0.00773709487915039,
- 0.007764135837554932,
- 0.0077558159828186035,
- 0.007748455047607422,
- 0.007779815196990967,
- 0.007781575202941895,
- 0.09214866638183594,
- 0.0022166330814361573,
- 0.00237119197845459,
- 0.002261112928390503,
- 0.0022028729915618897,
- 0.002198072910308838,
- 0.0022182331085205077,
- 0.0024436719417572023,
- 0.0022028729915618897,
- 0.0022047929763793946,
- 0.0025163118839263915,
- 0.002317272901535034,
- 0.0022025530338287353,
- 0.003608628034591675,
- 0.007744615077972412,
- 0.007764774799346924,
- 0.007744295120239258,
- 0.007841414928436279,
- 0.00786541509628296,
- 0.0078206148147583,
- 0.007777735233306885,
- 0.007828774929046631,
- 0.007816454887390138,
- 0.007842695236206055,
- 0.007858695030212403,
- 0.00786861515045166,
- 0.007879334926605225,
- 0.007881895065307617,
- 0.007832294940948486,
- 0.007831494808197021,
- 0.007831335067749023,
- 0.007791494846343994,
- 0.007787014961242676,
- 0.007869575023651123,
- 0.00803693389892578,
- 0.00803133487701416,
- 0.007947975158691407,
- 0.007839175224304199,
- 0.007989093780517579,
- 0.00793261480331421,
- 0.007861734867095948,
- 0.007890055179595947,
- 0.007948453903198243,
- 0.008041734695434571,
- 0.007975174903869628,
- 0.00795053482055664,
- 0.00794061517715454,
- 0.00799037504196167,
- 0.008063974380493165,
- 0.008020613670349121,
- 0.007954535007476806,
- 0.007971333980560302,
- 0.007897415161132813,
- 0.007793574810028076,
- 0.00781565523147583,
- 0.007819654941558838,
- 0.007798534870147705,
- 0.007776615142822266,
- 0.007854694843292237,
- 0.007764295101165771,
- 0.007825574874877929,
- 0.0077860550880432125,
- 0.00778733491897583,
- 0.008055174827575683,
- 0.008046854019165038,
- 0.008015494346618652,
- 0.007928935050964356,
- 0.007909894943237305,
- 0.007914374828338623,
- 0.007858695030212403,
- 0.007836295127868651,
- 0.007825415134429931,
- 0.00783757495880127,
- 0.007859014987945556,
- 0.007842374801635741,
- 0.007918213844299317,
- 0.00788941478729248,
- 0.007991654872894287,
- 0.007927334785461426,
- 0.007904294967651368,
- 0.00794061517715454,
- 0.007885094165802003,
- 0.007872135162353515,
- 0.00804093360900879,
- 0.007942375183105468,
- 0.007778055191040039,
- 0.007870695114135743,
- 0.007780934810638428,
- 0.007789414882659912,
- 0.007855655193328858,
- 0.008022053718566895,
- 0.007743015766143799,
- 0.007730535984039307,
- 0.007770374774932861,
- 0.007754694938659668,
- 0.007747654914855957,
- 0.007707975864410401,
- 0.00774477481842041,
- 0.0076959748268127445,
- 0.007734694957733154
+ 0.008828606605529785,
+ 0.008980766296386718,
+ 0.009050366401672363,
+ 0.009339964866638184,
+ 0.008928446769714356,
+ 0.008880447387695313,
+ 0.008844765663146972,
+ 0.008795487403869628,
+ 0.00866172695159912,
+ 0.008657567024230957,
+ 0.008702366828918458,
+ 0.008669246673583984,
+ 0.008776926040649414,
+ 0.008593567848205566,
+ 0.008554367065429688,
+ 0.008600607872009277,
+ 0.008574527740478516,
+ 0.008541887283325195,
+ 0.008555487632751465,
+ 0.008531967163085937,
+ 0.00858844757080078,
+ 0.00854108715057373,
+ 0.008489407539367676,
+ 0.008535008430480957,
+ 0.008509247779846192,
+ 0.008467488288879395,
+ 0.00855756664276123,
+ 0.00857692813873291,
+ 0.008515007019042968,
+ 0.008462207794189453,
+ 0.008562527656555175,
+ 0.008553248405456543,
+ 0.008482687950134278,
+ 0.008536446571350097,
+ 0.00854860782623291,
+ 0.008535806655883788,
+ 0.008506048202514648,
+ 0.008518048286437988,
+ 0.008494047164916993,
+ 0.008599328041076661,
+ 0.008585887908935547,
+ 0.00851420783996582,
+ 0.008533087730407715,
+ 0.00852012825012207,
+ 0.008515007019042968,
+ 0.008548128128051757,
+ 0.00860460662841797,
+ 0.008547327995300292,
+ 0.008542048454284667,
+ 0.008517727851867676,
+ 0.008523647308349609,
+ 0.008473407745361329,
+ 0.008461407661437988,
+ 0.008610366821289062,
+ 0.008771327018737793,
+ 0.00863500690460205,
+ 0.008598047256469727,
+ 0.008672126770019531,
+ 0.008594688415527343,
+ 0.008612768173217773,
+ 0.008651646614074707,
+ 0.00868300724029541,
+ 0.00875612735748291,
+ 0.00870380687713623,
+ 0.008706047058105468,
+ 0.008675646781921387,
+ 0.008629568099975586,
+ 0.008613086700439454,
+ 0.008654367446899415,
+ 0.008728126525878906,
+ 0.008562847137451172,
+ 0.008538047790527343,
+ 0.008589886665344238,
+ 0.008579647064208985,
+ 0.008567008018493652,
+ 0.008506048202514648,
+ 0.008518367767333985,
+ 0.008551008224487304,
+ 0.008543487548828124,
+ 0.008507967948913574,
+ 0.008487007141113282,
+ 0.008456768035888672,
+ 0.00852620792388916,
+ 0.008570527076721192,
+ 0.008515328407287597,
+ 0.008810046195983887,
+ 0.00879068660736084,
+ 0.008767645835876465,
+ 0.008625408172607422,
+ 0.008613086700439454,
+ 0.00867532730102539,
+ 0.008546687126159669,
+ 0.008677566528320312,
+ 0.008644607543945313,
+ 0.008554368019104005,
+ 0.008590046882629394,
+ 0.008612447738647461,
+ 0.00862492847442627,
+ 0.008527647018432618,
+ 0.008657727241516113,
+ 0.00852012825012207,
+ 0.008538686752319335,
+ 0.008517727851867676,
+ 0.008514528274536132,
+ 0.008551807403564453,
+ 0.008519968032836915,
+ 0.00859452724456787,
+ 0.008554688453674316,
+ 0.00856924819946289,
+ 0.008511167526245117,
+ 0.008477408409118653,
+ 0.008587806701660157,
+ 0.008518047332763672,
+ 0.00845148754119873,
+ 0.008516127586364747,
+ 0.008498686790466309
  ]
  },
  "throughput": {
  "unit": "samples/s",
- "value": 125.47823988756328
+ "value": 116.23117242198337
  },
  "energy": null,
  "efficiency": null