IlyasMoutawwakil (HF staff) committed
Commit 3a5e963 · verified · 1 Parent(s): 2140428

Upload cuda_inference_transformers_text-classification_FacebookAI/roberta-base/benchmark.json with huggingface_hub

cuda_inference_transformers_text-classification_FacebookAI/roberta-base/benchmark.json CHANGED
@@ -83,7 +83,7 @@
  "gpu_count": 1,
  "gpu_vram_mb": 68702699520,
  "optimum_benchmark_version": "0.2.0",
- "optimum_benchmark_commit": "4ec62071e1c9b9c89fb7e3c044340b391a0c4120",
+ "optimum_benchmark_commit": "e65976e2695b67f37a76baa42b75347a3a733547",
  "transformers_version": "4.40.2",
  "transformers_commit": null,
  "accelerate_version": "0.30.1",
@@ -102,157 +102,165 @@
  "forward": {
  "memory": {
  "unit": "MB",
- "max_ram": 1007.190016,
- "max_global_vram": 1877.520384,
- "max_process_vram": 234487.123968,
+ "max_ram": 1008.586752,
+ "max_global_vram": 1877.516288,
+ "max_process_vram": 232966.889472,
  "max_reserved": 555.74528,
  "max_allocated": 499.443712
  },
  "latency": {
  "unit": "s",
- "count": 129,
- "total": 0.9963395566940308,
- "mean": 0.007723562454992486,
- "stdev": 9.508950416811287e-05,
- "p50": 0.00772273588180542,
- "p90": 0.007785038948059082,
- "p95": 0.007806287002563477,
- "p99": 0.007878249263763428,
+ "count": 137,
+ "total": 0.9973862705230712,
+ "mean": 0.0072801917556428565,
+ "stdev": 0.00023952705644979445,
+ "p50": 0.007160490989685059,
+ "p90": 0.00759425106048584,
+ "p95": 0.007729419231414795,
+ "p99": 0.008023986015319824,
  "values": [
- 0.007804656028747558,
- 0.007755535125732422,
- 0.007805614948272705,
- 0.007823535919189453,
- 0.007861774921417236,
- 0.007884655952453613,
- 0.007841935157775879,
- 0.007760655879974365,
- 0.007806735038757324,
- 0.007757935047149658,
- 0.007739055156707764,
- 0.00768993616104126,
- 0.007765134811401367,
- 0.007712015151977539,
- 0.007705134868621826,
- 0.007712015151977539,
- 0.00768081521987915,
- 0.007600655078887939,
- 0.007661135196685791,
- 0.007685776233673096,
- 0.00764817476272583,
- 0.00763937520980835,
- 0.007685615062713623,
- 0.007756334781646728,
- 0.007745615005493164,
- 0.0077628960609436035,
- 0.007789774894714356,
- 0.007759055137634277,
- 0.007731375217437744,
- 0.007713136196136475,
- 0.007836814880371093,
- 0.007741934776306152,
- 0.007771056175231933,
- 0.007759055137634277,
- 0.007754415035247803,
- 0.00773761510848999,
- 0.007773615837097168,
- 0.0077459349632263185,
- 0.007760334968566895,
- 0.007754254817962647,
- 0.007742094993591309,
- 0.0077747349739074705,
- 0.007782735824584961,
- 0.0077654547691345215,
- 0.007772655010223389,
- 0.007777775764465332,
- 0.00780481481552124,
- 0.0077838549613952635,
- 0.007801775932312012,
- 0.007744655132293701,
- 0.007789774894714356,
- 0.007746095180511474,
- 0.007730576038360595,
- 0.007687534809112549,
- 0.007698575019836426,
- 0.007704814910888672,
- 0.007700815200805664,
- 0.007732655048370361,
- 0.007712016105651856,
- 0.007689774990081787,
- 0.0076843352317810054,
- 0.007695535182952881,
- 0.0076040148735046385,
- 0.007661294937133789,
- 0.0075348949432373045,
- 0.0072403340339660645,
- 0.00727665376663208,
- 0.008356817245483398,
- 0.007758415222167968,
- 0.007685454845428466,
- 0.007572655200958252,
- 0.007718414783477783,
- 0.0077128162384033205,
- 0.007699534893035888,
- 0.007697935104370117,
- 0.007715694904327393,
- 0.007702095031738281,
- 0.007666574954986572,
- 0.007705296039581299,
- 0.0076889748573303225,
- 0.007697454929351807,
- 0.007723694801330567,
- 0.007692174911499023,
- 0.007695374965667724,
- 0.007664014816284179,
- 0.007670414924621582,
- 0.007728334903717041,
- 0.007706894874572754,
- 0.007711854934692383,
- 0.007715374946594238,
- 0.00772273588180542,
- 0.007685774803161621,
- 0.007681455135345459,
- 0.007709774971008301,
- 0.007760175228118896,
- 0.007694415092468262,
- 0.0077036948204040525,
- 0.0076700949668884275,
- 0.007704174995422363,
- 0.0076584148406982425,
- 0.0076748948097229005,
- 0.007729135036468506,
- 0.0077016158103942875,
- 0.007735215187072754,
- 0.007720495223999023,
- 0.007715054988861084,
- 0.007768815994262696,
- 0.0077502551078796385,
- 0.007728654861450196,
- 0.007700174808502197,
- 0.007778255939483642,
- 0.007759695053100586,
- 0.007731054782867432,
- 0.007724975109100342,
- 0.0077340960502624515,
- 0.007751054763793945,
- 0.007758735179901123,
- 0.007718894958496094,
- 0.007756336212158203,
- 0.007729774951934815,
- 0.007714254856109619,
- 0.007723374843597412,
- 0.0077390561103820804,
- 0.007697134971618652,
- 0.007734095096588135,
- 0.007714573860168457,
- 0.007708654880523681,
- 0.007691854953765869,
- 0.007711054801940918
+ 0.007745292186737061,
+ 0.007477130889892578,
+ 0.007263689994812012,
+ 0.007044650077819824,
+ 0.007075210094451904,
+ 0.007119690895080567,
+ 0.0071025710105896,
+ 0.007162409782409668,
+ 0.007113931179046631,
+ 0.007097609996795654,
+ 0.007102410793304443,
+ 0.007073929786682129,
+ 0.0070678510665893556,
+ 0.007101930141448974,
+ 0.007122090816497803,
+ 0.00716417121887207,
+ 0.007247690200805664,
+ 0.007101931095123291,
+ 0.007119850158691406,
+ 0.007075049877166748,
+ 0.007120650768280029,
+ 0.00708912992477417,
+ 0.007138570785522461,
+ 0.007095369815826416,
+ 0.007122251033782959,
+ 0.007096010208129883,
+ 0.007151371002197266,
+ 0.007112330913543701,
+ 0.00715680980682373,
+ 0.008091691970825196,
+ 0.007667212009429931,
+ 0.007131370067596435,
+ 0.0070819311141967775,
+ 0.007105770111083984,
+ 0.007234890937805175,
+ 0.007438411235809326,
+ 0.007288811206817627,
+ 0.007059529781341552,
+ 0.007088490962982178,
+ 0.0070894498825073244,
+ 0.007080009937286377,
+ 0.0073057708740234375,
+ 0.007294411182403564,
+ 0.007049449920654297,
+ 0.007095530986785888,
+ 0.007088329792022705,
+ 0.007089611053466797,
+ 0.0070612897872924804,
+ 0.00709873104095459,
+ 0.007068170070648193,
+ 0.007080330848693848,
+ 0.007079209804534912,
+ 0.007053610801696778,
+ 0.007223851203918457,
+ 0.007242090225219726,
+ 0.00704577112197876,
+ 0.007083690166473389,
+ 0.007020330905914306,
+ 0.007039370059967041,
+ 0.007156811237335205,
+ 0.007258730888366699,
+ 0.007137290000915527,
+ 0.00707905101776123,
+ 0.007047530174255371,
+ 0.007334890842437744,
+ 0.007160490989685059,
+ 0.0075468912124633785,
+ 0.007918571949005127,
+ 0.007079850196838379,
+ 0.0070592107772827145,
+ 0.007590091228485108,
+ 0.007602090835571289,
+ 0.007431050777435303,
+ 0.007401771068572998,
+ 0.007601771831512451,
+ 0.0075577712059021,
+ 0.007725450992584228,
+ 0.0074806509017944335,
+ 0.007617610931396485,
+ 0.007434091091156006,
+ 0.007472970962524414,
+ 0.007379371166229248,
+ 0.007331050872802734,
+ 0.007425450801849366,
+ 0.007289930820465088,
+ 0.007331210136413574,
+ 0.0073494501113891605,
+ 0.0073828911781311036,
+ 0.0073737711906433105,
+ 0.0073449711799621585,
+ 0.007259050846099854,
+ 0.007452331066131592,
+ 0.007371211051940918,
+ 0.007443050861358643,
+ 0.007668170928955078,
+ 0.007532970905303955,
+ 0.007547211170196533,
+ 0.007426731109619141,
+ 0.007456651210784912,
+ 0.0074880108833312985,
+ 0.0075939311981201175,
+ 0.007450251102447509,
+ 0.007322090148925781,
+ 0.007592172145843506,
+ 0.007262570858001709,
+ 0.007305769920349121,
+ 0.007168810844421386,
+ 0.007153131008148193,
+ 0.007122729778289795,
+ 0.00717409086227417,
+ 0.007112810134887695,
+ 0.007135529994964599,
+ 0.007139531135559082,
+ 0.007098090171813965,
+ 0.007112650871276855,
+ 0.007113770008087158,
+ 0.007081770896911621,
+ 0.007092329978942871,
+ 0.0070568108558654785,
+ 0.00710000991821289,
+ 0.007066411018371582,
+ 0.007037290096282959,
+ 0.007108651161193848,
+ 0.007118889808654785,
+ 0.0070816102027893065,
+ 0.007052969932556152,
+ 0.007987690925598144,
+ 0.007747372150421142,
+ 0.007503850936889649,
+ 0.007542730808258057,
+ 0.00750321102142334,
+ 0.007503850936889649,
+ 0.007594730854034424,
+ 0.00744209098815918,
+ 0.007473771095275879,
+ 0.00802721118927002,
+ 0.008018252372741699
  ]
  },
  "throughput": {
  "unit": "samples/s",
- "value": 129.4739319876417
+ "value": 137.35901931771272
  },
  "energy": null,
  "efficiency": null