Upload cuda_inference_transformers_multiple-choice_FacebookAI/roberta-base/benchmark.json with huggingface_hub
cuda_inference_transformers_multiple-choice_FacebookAI/roberta-base/benchmark.json
CHANGED
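For context, this commit was produced by the `huggingface_hub` client, as the commit message says. Below is a minimal sketch of the kind of call that creates such a commit, assuming the file is pushed to a dataset repo; the `repo_id` is a placeholder, since the target repository is not shown on this page:

```python
from huggingface_hub import HfApi

api = HfApi()  # picks up a cached login or the HF_TOKEN environment variable

# Upload one local file into the repo under the nested benchmark path.
api.upload_file(
    path_or_fileobj="benchmark.json",
    path_in_repo="cuda_inference_transformers_multiple-choice_FacebookAI/roberta-base/benchmark.json",
    repo_id="<org>/<benchmark-dataset>",  # placeholder, not taken from this page
    repo_type="dataset",
    commit_message="Upload cuda_inference_transformers_multiple-choice_FacebookAI/roberta-base/benchmark.json with huggingface_hub",
)
```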
```diff
@@ -3,7 +3,7 @@
     "name": "cuda_inference_transformers_multiple-choice_FacebookAI/roberta-base",
     "backend": {
         "name": "pytorch",
-        "version": "2.
+        "version": "2.5.1+cu124",
         "_target_": "optimum_benchmark.backends.pytorch.backend.PyTorchBackend",
         "task": "multiple-choice",
         "library": "transformers",
@@ -44,9 +44,9 @@
         "duration": 1,
         "warmup_runs": 1,
         "input_shapes": {
-            "batch_size":
-            "
-            "
+            "batch_size": 2,
+            "sequence_length": 16,
+            "num_choices": 2
         },
         "new_tokens": null,
         "memory": true,
@@ -73,10 +73,10 @@
     "environment": {
         "cpu": " AMD EPYC 7R32",
         "cpu_count": 16,
-        "cpu_ram_mb": 66697.
+        "cpu_ram_mb": 66697.248768,
         "system": "Linux",
         "machine": "x86_64",
-        "platform": "Linux-5.10.
+        "platform": "Linux-5.10.227-219.884.amzn2.x86_64-x86_64-with-glibc2.35",
         "processor": "x86_64",
         "python_version": "3.10.12",
         "gpu": [
@@ -86,15 +86,15 @@
         "gpu_vram_mb": 24146608128,
         "optimum_benchmark_version": "0.5.0.dev0",
         "optimum_benchmark_commit": null,
-        "transformers_version": "4.
+        "transformers_version": "4.46.3",
         "transformers_commit": null,
-        "accelerate_version": "1.
+        "accelerate_version": "1.1.1",
         "accelerate_commit": null,
-        "diffusers_version": "0.
+        "diffusers_version": "0.31.0",
         "diffusers_commit": null,
         "optimum_version": null,
         "optimum_commit": null,
-        "timm_version": "1.0.
+        "timm_version": "1.0.11",
         "timm_commit": null,
         "peft_version": "0.13.2",
         "peft_commit": null
@@ -106,7 +106,7 @@
     "load": {
         "memory": {
             "unit": "MB",
-            "max_ram":
+            "max_ram": 818.62656,
             "max_global_vram": 1192.7552,
             "max_process_vram": 0.0,
             "max_reserved": 555.74528,
@@ -115,15 +115,15 @@
         "latency": {
             "unit": "s",
             "values": [
-                0.
+                0.07860838317871094
             ],
             "count": 1,
-            "total": 0.
-            "mean": 0.
-            "p50": 0.
-            "p90": 0.
-            "p95": 0.
-            "p99": 0.
+            "total": 0.07860838317871094,
+            "mean": 0.07860838317871094,
+            "p50": 0.07860838317871094,
+            "p90": 0.07860838317871094,
+            "p95": 0.07860838317871094,
+            "p99": 0.07860838317871094,
             "stdev": 0,
             "stdev_": 0
         },
@@ -134,212 +134,203 @@
     "forward": {
         "memory": {
             "unit": "MB",
-            "max_ram":
-            "max_global_vram":
+            "max_ram": 1095.979008,
+            "max_global_vram": 1207.435264,
             "max_process_vram": 0.0,
-            "max_reserved":
-            "max_allocated":
+            "max_reserved": 559.939584,
+            "max_allocated": 510.51264
         },
         "latency": {
             "unit": "s",
             "values": [
-                0.
-                0.
-                0.
-                0.
-                0.
-                0.005574656009674072,
-                0.005576704025268555,
-                0.005568511962890625,
-                0.0055920639038085935,
-                0.005554175853729248,
-                0.005559296131134033,
-                0.005542912006378174,
-                0.005595136165618897,
-                0.005565375804901123,
-                0.005527488231658936,
-                0.005544960021972656,
-                0.005532671928405761,
-                0.00552345609664917,
-                0.005533696174621582,
-                0.005552127838134766,
-                0.005541888236999512,
-                0.005570559978485107,
-                0.005511104106903076,
-                0.0055552000999450684,
-                0.005582848072052002,
-                0.005628928184509278,
-                0.0055511040687561035,
-                0.005879807949066162,
-                0.005880832195281982,
-                0.00586137580871582,
-                0.005840896129608154,
-                0.005801983833312988,
-                0.005889023780822754,
-                0.005810175895690918,
-                0.005811200141906738,
-                0.005761023998260498,
-                0.006789120197296142,
-                0.006370304107666015,
-                0.006107071876525879,
-                0.005824512004852295,
-                0.006033408164978027,
-                0.006235072135925293,
-                0.006045695781707764,
-                0.005793791770935058,
-                0.005856287956237793,
-                0.0057712640762329105,
-                0.005990399837493897,
-                0.005850111961364746,
-                0.005860352039337159,
-                0.005879807949066162,
+                0.006214655876159668,
+                0.005856256008148194,
+                0.005955584049224853,
+                0.005839871883392334,
+                0.005901311874389649,
                 0.005854207992553711,
-                0.005763072013854981,
-                0.005797887802124023,
-                0.005780479907989502,
-                0.005784575939178467,
-                0.005795839786529541,
-                0.005777408123016357,
-                0.005819392204284668,
-                0.0057487359046936035,
                 0.00582041597366333,
-                0.
-                0.
-                0.
-                0.
-                0.
-                0.
-                0.005776383876800537,
-                0.0057415680885314945,
-                0.005810175895690918,
-                0.0057712640762329105,
-                0.006140927791595459,
-                0.0057784318923950195,
-                0.005826560020446778,
-                0.005761023998260498,
-                0.0057916479110717775,
+                0.005863520145416259,
+                0.005832736015319824,
+                0.005841983795166016,
+                0.005908480167388916,
+                0.0058009281158447265,
+                0.005852159976959229,
                 0.005948416233062744,
-                0.
-                0.
-                0.
-                0.
-                0.
-                0.
-                0.
-                0.
-                0.
-                0.
-                0.
-                0.
-                0.
-                0.
-                0.
-                0.
-                0.
-                0.
-                0.
-                0.
-                0.
-                0.
-                0.
-                0.
-                0.
-                0.
-                0.
-                0.
-                0.
-                0.
+                0.0058388481140136715,
+                0.0061173119544982914,
+                0.006068384170532227,
+                0.006048768043518066,
+                0.00608358383178711,
+                0.006043776035308838,
+                0.00603545618057251,
+                0.006082560062408447,
+                0.006050816059112549,
+                0.006079487800598145,
+                0.006041600227355957,
+                0.006078495979309082,
+                0.006111231803894043,
+                0.006073247909545898,
+                0.006058080196380615,
+                0.00601907205581665,
+                0.006022143840789795,
+                0.00603545618057251,
+                0.006392672061920166,
+                0.006024191856384278,
+                0.006037504196166992,
+                0.006036608219146728,
+                0.006060031890869141,
+                0.006049791812896729,
+                0.006040671825408936,
+                0.006057983875274659,
+                0.006081759929656982,
+                0.00617574405670166,
+                0.006103040218353272,
+                0.006230016231536865,
+                0.006047776222229004,
+                0.006239232063293457,
+                0.006216639995574951,
+                0.006153151988983154,
+                0.006212736129760742,
+                0.006202271938323975,
+                0.006013951778411865,
+                0.006039552211761475,
+                0.006052864074707031,
+                0.006053887844085694,
+                0.00603545618057251,
+                0.0060702719688415525,
+                0.006042623996734619,
+                0.006091775894165039,
+                0.006135807991027832,
+                0.006127615928649902,
+                0.006301695823669433,
+                0.006079487800598145,
+                0.006060128211975098,
+                0.006086656093597412,
+                0.006072319984436035,
+                0.0060702719688415525,
+                0.006247424125671387,
+                0.0062679038047790524,
+                0.006141952037811279,
+                0.006053887844085694,
+                0.006085472106933594,
+                0.006106239795684814,
+                0.006350815773010254,
+                0.0065812478065490725,
+                0.0062975997924804685,
+                0.006351871967315674,
+                0.006908927917480469,
+                0.00638153600692749,
+                0.006269951820373535,
+                0.006051839828491211,
+                0.00607539176940918,
                 0.006081535816192627,
+                0.006051839828491211,
+                0.006238207817077636,
+                0.006097760200500488,
+                0.00626585578918457,
+                0.006073344230651856,
+                0.006451200008392334,
+                0.006067200183868408,
+                0.006053887844085694,
+                0.006095871925354004,
+                0.006076416015625,
+                0.006065152168273926,
+                0.006015999794006348,
+                0.006032447814941406,
+                0.006021120071411133,
+                0.006044672012329101,
+                0.006071263790130615,
+                0.006092895984649658,
+                0.006009856224060059,
                 0.0060631041526794435,
-                0.
-                0.
-                0.
-                0.
-                0.
-                0.
-                0.
-                0.
-                0.
-                0.
-                0.
-                0.
-                0.
-                0.
-                0.
-                0.
-                0.
-                0.
-                0.
-                0.
-                0.
-                0.
-                0.
-                0.
-                0.
-                0.
-                0.
-                0.
-                0.
-                0.
-                0.
-                0.
-                0.
-                0.
-                0.
-                0.
-                0.
-                0.
-                0.
-                0.
-                0.
-                0.
-                0.
-                0.
-                0.
-                0.
-                0.
-                0.
-                0.
-                0.
-                0.
-                0.
-                0.
-                0.
-                0.
-                0.
-                0.
-                0.
-                0.
-                0.
-                0.
-                0.
-                0.00573747205734253,
-                0.005742623805999756
+                0.006254591941833496,
+                0.006309919834136963,
+                0.00608358383178711,
+                0.006085631847381592,
+                0.006030335903167725,
+                0.006025216102600098,
+                0.006188992023468017,
+                0.006110208034515381,
+                0.006051839828491211,
+                0.006108191967010498,
+                0.006037504196166992,
+                0.006231040000915527,
+                0.006074368000030517,
+                0.0060415358543396,
+                0.006061056137084961,
+                0.006004735946655273,
+                0.006032383918762207,
+                0.006031360149383545,
+                0.006402048110961914,
+                0.006333439826965332,
+                0.006312960147857666,
+                0.006235136032104492,
+                0.00633241605758667,
+                0.0062566399574279785,
+                0.0062156801223754886,
+                0.006345727920532227,
+                0.006351871967315674,
+                0.00633241605758667,
+                0.00624128007888794,
+                0.006188032150268555,
+                0.006275072097778321,
+                0.006169600009918213,
+                0.006275072097778321,
+                0.0062740478515625,
+                0.006436863899230957,
+                0.006354944229125976,
+                0.006200319766998291,
+                0.006202367782592774,
+                0.006165503978729248,
+                0.006191103935241699,
+                0.006100992202758789,
+                0.006087679862976075,
+                0.006104063987731933,
+                0.00611737585067749,
+                0.006138879776000977,
+                0.006182911872863769,
+                0.0062863359451293946,
+                0.006143008232116699,
+                0.006076543807983398,
+                0.006026303768157959,
+                0.006076511859893799,
+                0.006050816059112549,
+                0.006196224212646485,
+                0.006067200183868408,
+                0.00601203203201294,
+                0.00612665605545044,
+                0.006106112003326416,
+                0.006056960105895996,
+                0.00601804780960083,
+                0.006058047771453858,
+                0.006022079944610596,
+                0.0061008319854736325
             ],
-            "count":
-            "total":
-            "mean": 0.
-            "p50": 0.
-            "p90": 0.
-            "p95": 0.
-            "p99": 0.
-            "stdev": 0.
-            "stdev_":
+            "count": 163,
+            "total": 0.9972273898124694,
+            "mean": 0.006117959446702267,
+            "p50": 0.006079487800598145,
+            "p90": 0.006308275032043458,
+            "p95": 0.006351871967315674,
+            "p99": 0.006500618171691894,
+            "stdev": 0.00014707380597687064,
+            "stdev_": 2.4039683044343665
         },
         "throughput": {
             "unit": "samples/s",
-            "value":
+            "value": 326.9063839705655
         },
         "energy": {
             "unit": "kWh",
-            "cpu":
-            "ram": 3.
-            "gpu": 1.
-            "total": 2.
+            "cpu": 7.282588151839846e-08,
+            "ram": 3.978702170203643e-08,
+            "gpu": 1.8107717280981825e-07,
+            "total": 2.9369007603025315e-07
         },
         "efficiency": {
             "unit": "samples/kWh",
-            "value":
+            "value": 6809899.833980019
         }
     }
 }
```
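The derived statistics in the new file are consistent with the raw measurements: `mean` is `total / count` (0.9972273898124694 / 163 ≈ 0.0061180 s), `throughput` appears to be `batch_size / mean` (2 / 0.006117959446702267 ≈ 326.906 samples/s), and `efficiency` appears to be `batch_size / total energy` (2 / 2.9369007603025315e-07 ≈ 6.81e6 samples/kWh). A small sketch that recomputes these from the uploaded file; the `find_key` helper is ours, and it sidesteps the file's exact top-level nesting, which the diff does not show:

```python
import json

def find_key(node, key):
    """Depth-first search for the first dict that contains `key`."""
    if isinstance(node, dict):
        if key in node:
            return node[key]
        children = node.values()
    elif isinstance(node, list):
        children = node
    else:
        return None
    for child in children:
        found = find_key(child, key)
        if found is not None:
            return found
    return None

with open("benchmark.json") as f:
    report = json.load(f)

forward = find_key(report, "forward")
shapes = find_key(report, "input_shapes")

latencies = forward["latency"]["values"]  # per-iteration forward latencies, in seconds
batch_size = shapes["batch_size"]         # 2 in this run

mean = sum(latencies) / len(latencies)
print(len(latencies), mean)                     # 163, ~0.0061180 s
print(batch_size / mean)                        # ~326.906, matches "throughput"
print(batch_size / forward["energy"]["total"])  # ~6.81e6, matches "efficiency"
```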