Commit 33f39e4 (verified) · Parent(s): bf039e1
Committed by IlyasMoutawwakil (HF staff)

Upload cuda_inference_timm_image-classification_timm/resnet50.a1_in1k/benchmark.json with huggingface_hub
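The commit message says the file was uploaded with huggingface_hub. Below is a minimal sketch of how such an upload can be done via HfApi.upload_file; only the path_in_repo and the commit message come from this commit, while the local file path, repo_id, and repo_type are illustrative assumptions.

from huggingface_hub import HfApi

api = HfApi()
api.upload_file(
    path_or_fileobj="benchmark.json",  # local result file produced by the benchmark run (assumed path)
    path_in_repo="cuda_inference_timm_image-classification_timm/resnet50.a1_in1k/benchmark.json",
    repo_id="<namespace>/<benchmark-repo>",  # hypothetical target repository
    repo_type="dataset",  # assumption: benchmark dumps are commonly stored in dataset repos
    commit_message="Upload cuda_inference_timm_image-classification_timm/resnet50.a1_in1k/benchmark.json with huggingface_hub",
)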
cuda_inference_timm_image-classification_timm/resnet50.a1_in1k/benchmark.json CHANGED
@@ -3,7 +3,7 @@
"name": "cuda_inference_timm_image-classification_timm/resnet50.a1_in1k",
"backend": {
"name": "pytorch",
- "version": "2.4.0+rocm6.1",
+ "version": "2.2.2+rocm5.7",
"_target_": "optimum_benchmark.backends.pytorch.backend.PyTorchBackend",
"task": "image-classification",
"library": "timm",
@@ -104,24 +104,24 @@
"load": {
"memory": {
"unit": "MB",
- "max_ram": 1465.921536,
- "max_global_vram": 0.0,
- "max_process_vram": 0.0,
+ "max_ram": 984.252416,
+ "max_global_vram": 409.178112,
+ "max_process_vram": 47767.662592,
"max_reserved": 123.731968,
"max_allocated": 102.475264
},
"latency": {
"unit": "s",
"count": 1,
- "total": 7.96849072265625,
- "mean": 7.96849072265625,
+ "total": 7.439666015625,
+ "mean": 7.439666015625,
"stdev": 0.0,
- "p50": 7.96849072265625,
- "p90": 7.96849072265625,
- "p95": 7.96849072265625,
- "p99": 7.96849072265625,
+ "p50": 7.439666015625,
+ "p90": 7.439666015625,
+ "p95": 7.439666015625,
+ "p99": 7.439666015625,
"values": [
- 7.96849072265625
+ 7.439666015625
]
},
"throughput": null,
@@ -131,154 +131,167 @@
"forward": {
"memory": {
"unit": "MB",
- "max_ram": 1550.47936,
- "max_global_vram": 0.0,
- "max_process_vram": 0.0,
+ "max_ram": 1842.884608,
+ "max_global_vram": 520.507392,
+ "max_process_vram": 248670.920704,
"max_reserved": 148.897792,
"max_allocated": 113.516032
},
"latency": {
"unit": "s",
- "count": 126,
- "total": 1.0020476994514467,
- "mean": 0.007952759519455926,
- "stdev": 0.0003899351168821052,
- "p50": 0.007858537435531616,
- "p90": 0.008433255672454835,
- "p95": 0.008625015020370483,
- "p99": 0.008680495262145996,
+ "count": 139,
+ "total": 0.9945551228523253,
+ "mean": 0.007155072826275723,
+ "stdev": 0.0003416737662471699,
+ "p50": 0.00705470085144043,
+ "p90": 0.007446220970153809,
+ "p95": 0.007520107841491699,
+ "p99": 0.007867479906082154,
"values": [
- 0.008289896011352538,
- 0.007897576808929444,
- 0.007972297191619872,
- 0.008167496681213379,
- 0.00801341724395752,
- 0.008151816368103028,
- 0.008172296524047851,
- 0.008127656936645508,
- 0.007944138050079345,
- 0.0077236580848693845,
- 0.007712777137756348,
- 0.007682697772979736,
- 0.007841097831726073,
- 0.007826376914978027,
- 0.007595499038696289,
- 0.007580937862396241,
- 0.007572937965393067,
- 0.007617097854614258,
- 0.007600458145141602,
- 0.007585258960723877,
- 0.0075646181106567385,
- 0.0075633378028869625,
- 0.007539498805999756,
- 0.007498379230499268,
- 0.007597097873687744,
- 0.007695978164672852,
- 0.007778378009796143,
- 0.007862536907196046,
- 0.007825098037719726,
- 0.007831336975097657,
- 0.007770698070526123,
- 0.007791976928710937,
- 0.007770217895507812,
- 0.007724617958068848,
- 0.0076783781051635745,
- 0.007679977893829346,
- 0.007654218196868896,
- 0.007616298198699951,
- 0.0107542085647583,
- 0.008088296890258789,
- 0.007944137096405028,
- 0.007755497932434082,
- 0.008053096771240234,
- 0.00844509506225586,
- 0.008093096733093262,
- 0.007877577781677246,
- 0.008130057334899903,
- 0.007936296939849853,
- 0.007782057762145996,
- 0.008169257164001464,
- 0.008333256721496582,
- 0.008187175750732422,
- 0.007963656902313232,
- 0.007835497856140137,
- 0.008035337448120117,
- 0.00842141628265381,
- 0.007827817916870117,
- 0.008047817230224609,
- 0.007918537139892578,
- 0.007920297145843505,
- 0.00785549783706665,
- 0.0077500581741333004,
- 0.007828617095947265,
- 0.007621898174285889,
- 0.007884458065032959,
- 0.007956616878509522,
- 0.008085577011108399,
- 0.007861577033996581,
- 0.007605738162994384,
- 0.007617417812347412,
- 0.007625578880310058,
- 0.007645258903503418,
- 0.007639179229736328,
- 0.007608938217163086,
- 0.00780269718170166,
- 0.007619338989257813,
- 0.007716618061065674,
- 0.007950697898864746,
- 0.007956778049468994,
- 0.00776397705078125,
- 0.007628939151763916,
- 0.007614058971405029,
- 0.007751017093658447,
- 0.007752617835998535,
- 0.008118857383728027,
- 0.008220616340637208,
- 0.008151657104492187,
- 0.007757897853851318,
- 0.007781417846679688,
- 0.008090216636657715,
- 0.00817389678955078,
- 0.008008777618408203,
- 0.008033416748046874,
- 0.007996617794036865,
- 0.0076855778694152836,
- 0.007931818008422851,
- 0.007773897171020508,
- 0.00795533800125122,
- 0.007633738994598389,
- 0.00780797815322876,
- 0.007776457786560059,
- 0.007824137210845947,
- 0.007616299152374268,
- 0.007625419139862061,
- 0.007583658218383789,
- 0.007629417896270752,
- 0.007746377944946289,
- 0.007998056888580322,
- 0.00790365695953369,
- 0.00823421573638916,
- 0.00823133659362793,
- 0.008206855773925782,
- 0.008457096099853515,
- 0.00863613510131836,
- 0.008542216300964356,
- 0.008384776115417481,
- 0.00806589698791504,
- 0.00827277660369873,
- 0.008544775009155273,
- 0.008629895210266114,
- 0.008634056091308594,
- 0.008625575065612793,
- 0.008693095207214355,
- 0.008623334884643554,
- 0.008642695426940917,
- 0.008617094993591309
+ 0.007560140132904053,
+ 0.007515659809112549,
+ 0.007385419845581055,
+ 0.007470540046691895,
+ 0.00747245979309082,
+ 0.007440141201019287,
+ 0.00749454116821289,
+ 0.007386701107025146,
+ 0.007359499931335449,
+ 0.007385580062866211,
+ 0.007415020942687988,
+ 0.007287341117858887,
+ 0.007179340839385986,
+ 0.007135180950164795,
+ 0.007113901138305664,
+ 0.007087020874023437,
+ 0.007102701187133789,
+ 0.007136620998382568,
+ 0.007105101108551026,
+ 0.007113901138305664,
+ 0.007095820903778076,
+ 0.007086221218109131,
+ 0.007133261203765869,
+ 0.007105101108551026,
+ 0.01053293228149414,
+ 0.007473259925842285,
+ 0.007033580780029297,
+ 0.0070409421920776365,
+ 0.0072527799606323246,
+ 0.007021101951599121,
+ 0.0070604619979858394,
+ 0.007030220985412597,
+ 0.007044140815734863,
+ 0.007034060955047608,
+ 0.007047341823577881,
+ 0.007028302192687988,
+ 0.007029901027679444,
+ 0.007063180923461914,
+ 0.007030381202697754,
+ 0.007020781993865967,
+ 0.00701134204864502,
+ 0.007014541149139404,
+ 0.007053421020507812,
+ 0.0070166211128234865,
+ 0.007001582145690918,
+ 0.007026061058044434,
+ 0.00729262113571167,
+ 0.007053582191467285,
+ 0.007038701057434082,
+ 0.0071236610412597655,
+ 0.007012940883636474,
+ 0.007190701007843018,
+ 0.007067340850830078,
+ 0.007059660911560059,
+ 0.007051180839538574,
+ 0.00722606086730957,
+ 0.007044140815734863,
+ 0.007078061103820801,
+ 0.007054222106933594,
+ 0.007066861152648925,
+ 0.007079980850219727,
+ 0.007076140880584717,
+ 0.007036460876464844,
+ 0.007049420833587647,
+ 0.0070145421028137206,
+ 0.00708910083770752,
+ 0.007076301097869873,
+ 0.007056140899658203,
+ 0.0070380611419677734,
+ 0.007079660892486572,
+ 0.0070623822212219235,
+ 0.007028141975402832,
+ 0.007045741081237793,
+ 0.0070550208091735836,
+ 0.007022700786590576,
+ 0.0070455818176269535,
+ 0.0069729418754577634,
+ 0.006980620861053467,
+ 0.007095820903778076,
+ 0.0070603017807006835,
+ 0.007048302173614502,
+ 0.007060461044311524,
+ 0.007053740978240967,
+ 0.0070617408752441406,
+ 0.0070431809425354,
+ 0.007039661884307861,
+ 0.007049901008605957,
+ 0.007051980972290039,
+ 0.007053101062774658,
+ 0.007031661033630371,
+ 0.0070334219932556155,
+ 0.007064940929412841,
+ 0.007003180980682373,
+ 0.006965901851654053,
+ 0.007091661930084228,
+ 0.006988780975341797,
+ 0.007095180988311768,
+ 0.006980300903320313,
+ 0.007075662136077881,
+ 0.006985901832580566,
+ 0.007074541091918946,
+ 0.00705470085144043,
+ 0.007038381099700928,
+ 0.007053741931915283,
+ 0.007656779766082764,
+ 0.007838698863983154,
+ 0.007860138893127441,
+ 0.007871979236602784,
+ 0.0076369400024414065,
+ 0.007240621089935303,
+ 0.007404300212860107,
+ 0.0074759812355041505,
+ 0.007510860919952392,
+ 0.007330541133880615,
+ 0.007037741184234619,
+ 0.007048780918121338,
+ 0.007046381950378418,
+ 0.007042061805725097,
+ 0.0070569410324096676,
+ 0.007051980972290039,
+ 0.007051821231842041,
+ 0.007021582126617432,
+ 0.007060781955718994,
+ 0.007061581134796143,
+ 0.007055340766906738,
+ 0.007037100791931153,
+ 0.007041581153869629,
+ 0.007029101848602295,
+ 0.006996780872344971,
+ 0.006938861846923828,
+ 0.007046061992645264,
+ 0.0070263810157775875,
+ 0.007032461166381836,
+ 0.007040141105651855,
+ 0.007018062114715576,
+ 0.007033259868621826,
+ 0.007227341175079346,
+ 0.007033421039581299,
+ 0.006933741092681885
]
},
"throughput": {
"unit": "samples/s",
- "value": 125.74251711667665
+ "value": 139.76098137361777
},
"energy": null,
"efficiency": null