IlyasMoutawwakil committed (verified)
Commit 537d2ff · Parent: d070b6f

Upload cuda_inference_transformers_text-classification_FacebookAI/roberta-base/benchmark.json with huggingface_hub

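The commit message above indicates the file was pushed with the huggingface_hub client. Below is a minimal sketch of such an upload; the repo_id, repo_type, and authentication setup are assumptions, since the actual upload script is not part of this commit.

from huggingface_hub import HfApi

# Sketch of an upload like the one described in the commit message.
# repo_id and repo_type are hypothetical; the real target repo is not shown here.
api = HfApi()  # relies on a token configured via `huggingface-cli login` or HF_TOKEN
api.upload_file(
    path_or_fileobj="benchmark.json",  # local result file produced by the benchmark run
    path_in_repo="cuda_inference_transformers_text-classification_FacebookAI/roberta-base/benchmark.json",
    repo_id="username/benchmark-results",  # hypothetical repository
    repo_type="dataset",                   # assumption: results stored in a dataset repo
    commit_message="Upload cuda_inference_transformers_text-classification_FacebookAI/roberta-base/benchmark.json with huggingface_hub",
)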
cuda_inference_transformers_text-classification_FacebookAI/roberta-base/benchmark.json CHANGED
@@ -83,7 +83,7 @@
  "gpu_count": 1,
  "gpu_vram_mb": 68702699520,
  "optimum_benchmark_version": "0.2.0",
- "optimum_benchmark_commit": "77e62a3eef699bce70248c56d35c703369938b85",
+ "optimum_benchmark_commit": "b04fb3c909a5873eadf03d7b46ccfac63afcdf9e",
  "transformers_version": "4.40.2",
  "transformers_commit": null,
  "accelerate_version": "0.30.1",
@@ -102,167 +102,163 @@
  "forward": {
  "memory": {
  "unit": "MB",
- "max_ram": 1008.959488,
- "max_global_vram": 2522.836992,
- "max_process_vram": 226898.518016,
+ "max_ram": 1008.951296,
+ "max_global_vram": 1877.52448,
+ "max_process_vram": 206411.579392,
  "max_reserved": 555.74528,
  "max_allocated": 499.443712
  },
  "latency": {
  "unit": "s",
- "count": 139,
- "total": 0.9981516461372376,
- "mean": 0.0071809470945125,
- "stdev": 0.0003183605302359094,
- "p50": 0.007091347217559814,
- "p90": 0.007381938934326172,
- "p95": 0.007519570779800414,
- "p99": 0.007941451139450073,
+ "count": 135,
+ "total": 0.998494976520538,
+ "mean": 0.007396259085337322,
+ "stdev": 0.0005560047040005829,
+ "p50": 0.0071492919921875,
+ "p90": 0.007955566120147705,
+ "p95": 0.00867038230895996,
+ "p99": 0.008967957706451415,
  "values": [
- 0.007919826030731202,
- 0.007084307193756104,
- 0.007131347179412842,
- 0.007205427169799805,
- 0.007233266830444336,
- 0.007191027164459228,
- 0.007468945980072021,
- 0.007454866886138916,
- 0.007448145866394043,
- 0.007505746841430664,
- 0.00739422607421875,
- 0.007310067176818848,
- 0.007332306861877441,
- 0.0072875070571899415,
- 0.007303826808929444,
- 0.007191826820373535,
- 0.00722318696975708,
- 0.0071572670936584475,
- 0.0071598272323608395,
- 0.007091347217559814,
- 0.0070843081474304195,
- 0.007174868106842041,
- 0.007135988235473633,
- 0.007057907104492188,
- 0.007338547229766846,
- 0.007123188018798828,
- 0.007170707225799561,
- 0.007203987121582031,
- 0.00716206693649292,
- 0.007123666763305664,
- 0.0071027069091796876,
- 0.007140787124633789,
- 0.007090867042541504,
- 0.007150866985321045,
- 0.007123666763305664,
- 0.007299187183380127,
- 0.007139347076416016,
- 0.007074546813964844,
- 0.00709486722946167,
- 0.007080627918243408,
- 0.007022387981414795,
- 0.007189427852630615,
- 0.010238061904907227,
- 0.007643986225128174,
- 0.007075346946716308,
- 0.007070868015289306,
- 0.007056628227233887,
- 0.007053427219390869,
- 0.007045746803283692,
- 0.0070311870574951175,
- 0.006975828170776367,
- 0.007052146911621094,
- 0.00706318712234497,
- 0.007262227058410645,
- 0.007053906917572021,
- 0.007051348209381103,
- 0.007378867149353027,
- 0.007138707160949707,
- 0.007047347068786621,
- 0.007018708229064942,
- 0.007068787097930908,
- 0.006990386962890625,
- 0.007085906982421875,
- 0.007226867198944092,
- 0.007213266849517822,
- 0.007223027229309082,
- 0.0070721468925476075,
- 0.007887666225433349,
- 0.007074387073516846,
- 0.006992147922515869,
- 0.007117907047271729,
- 0.007012466907501221,
- 0.007091506958007813,
- 0.007059826850891114,
- 0.007017268180847168,
- 0.00707470703125,
- 0.007002226829528809,
- 0.007013906955718994,
- 0.007130387783050537,
- 0.007081266880035401,
- 0.006995666980743409,
- 0.007040787220001221,
- 0.007078708171844483,
- 0.007096628189086914,
- 0.007026226997375489,
- 0.0070630269050598144,
- 0.007061906814575195,
- 0.007101906776428223,
- 0.007059668064117432,
- 0.007103346824645996,
- 0.007100626945495605,
- 0.007048787117004395,
- 0.007042226791381836,
- 0.007047028064727783,
- 0.007106067180633545,
- 0.007049586772918701,
- 0.007069427013397217,
- 0.007051667213439941,
- 0.007094868183135986,
- 0.007091667175292969,
- 0.007081747055053711,
- 0.007106866836547852,
- 0.007089587211608886,
- 0.007068147182464599,
- 0.006969107151031494,
- 0.007127027988433838,
- 0.007092628002166748,
- 0.007051667213439941,
- 0.007000786781311035,
- 0.0069948678016662595,
- 0.007072627067565918,
- 0.0070790271759033204,
- 0.007019987106323242,
- 0.007085426807403565,
- 0.007112627029418945,
- 0.0070659079551696775,
- 0.007070547103881836,
- 0.00706270694732666,
- 0.007078707218170166,
- 0.0070316681861877445,
- 0.007048947811126709,
- 0.007074546813964844,
- 0.007846705913543701,
- 0.007954705238342285,
- 0.0077140660285949705,
- 0.007284467220306397,
- 0.007434707164764404,
- 0.007435986042022705,
- 0.007326547145843506,
- 0.007276947021484375,
- 0.007210066795349121,
- 0.0072801470756530765,
- 0.007296466827392578,
- 0.007345586776733398,
- 0.007026226997375489,
- 0.00707710599899292,
- 0.0069875078201293945,
- 0.007039506912231445,
- 0.0070275068283081055
+ 0.007131852149963379,
+ 0.006895532131195068,
+ 0.006979691982269287,
+ 0.0072248120307922365,
+ 0.0073108930587768555,
+ 0.007299051761627197,
+ 0.007246892929077148,
+ 0.0071492919921875,
+ 0.0071140918731689455,
+ 0.00709201192855835,
+ 0.007101451873779297,
+ 0.007077291965484619,
+ 0.007003372192382813,
+ 0.0069972920417785646,
+ 0.006967691898345947,
+ 0.006952971935272217,
+ 0.006962731838226318,
+ 0.006980011940002442,
+ 0.0069502520561218264,
+ 0.006957931995391846,
+ 0.006997451782226562,
+ 0.006960492134094238,
+ 0.006998091220855713,
+ 0.00694257116317749,
+ 0.006963372230529785,
+ 0.006957292079925537,
+ 0.0069833722114562985,
+ 0.006930411815643311,
+ 0.006956972122192383,
+ 0.006987212181091308,
+ 0.006958891868591308,
+ 0.006942252159118653,
+ 0.008157293319702148,
+ 0.007091691970825195,
+ 0.006846570968627929,
+ 0.0068296117782592775,
+ 0.006784331798553467,
+ 0.006835371017456055,
+ 0.006799531936645508,
+ 0.006817611217498779,
+ 0.006774890899658203,
+ 0.006829291820526123,
+ 0.006852491855621338,
+ 0.006780011177062988,
+ 0.006815211772918701,
+ 0.006845292091369629,
+ 0.0068452911376953125,
+ 0.00681793212890625,
+ 0.007654092788696289,
+ 0.007739052772521972,
+ 0.00770801305770874,
+ 0.00816417407989502,
+ 0.008820494651794434,
+ 0.008920016288757325,
+ 0.008685613632202148,
+ 0.008817774772644043,
+ 0.008992654800415039,
+ 0.00913697624206543,
+ 0.00881617546081543,
+ 0.0085867338180542,
+ 0.008180974006652832,
+ 0.007956014156341553,
+ 0.00789633321762085,
+ 0.007927534103393554,
+ 0.007858572959899902,
+ 0.007857294082641601,
+ 0.007833292961120605,
+ 0.008663854598999023,
+ 0.00787665319442749,
+ 0.007837614059448243,
+ 0.007850412845611571,
+ 0.007841773986816406,
+ 0.00787073278427124,
+ 0.007821932792663574,
+ 0.00786289405822754,
+ 0.007964653015136719,
+ 0.007835533142089844,
+ 0.007843052864074707,
+ 0.0078475341796875,
+ 0.007844653129577637,
+ 0.007954894065856933,
+ 0.00783409309387207,
+ 0.007882092952728272,
+ 0.007871212959289551,
+ 0.007857772827148437,
+ 0.007900814056396484,
+ 0.007864013195037841,
+ 0.007785133838653565,
+ 0.006946570873260498,
+ 0.00697457218170166,
+ 0.006979691982269287,
+ 0.006982892036437988,
+ 0.006968492031097412,
+ 0.006945131778717041,
+ 0.006982572078704834,
+ 0.006973452091217041,
+ 0.006957931995391846,
+ 0.007001451969146729,
+ 0.006961771965026856,
+ 0.006981451034545899,
+ 0.00701361083984375,
+ 0.0069526519775390625,
+ 0.007004332065582275,
+ 0.007007051944732666,
+ 0.006907371044158936,
+ 0.007158092975616455,
+ 0.007703052997589111,
+ 0.007892492771148682,
+ 0.007831374168395995,
+ 0.0074169721603393555,
+ 0.007376812934875489,
+ 0.007319211959838867,
+ 0.007316973209381104,
+ 0.007301132202148438,
+ 0.007462253093719482,
+ 0.007400493144989014,
+ 0.006982252120971679,
+ 0.007004811763763428,
+ 0.007015851974487305,
+ 0.006964652061462402,
+ 0.007502091884613037,
+ 0.007905933856964111,
+ 0.007935213088989259,
+ 0.007833134174346924,
+ 0.007420331954956055,
+ 0.007312012195587158,
+ 0.00729505205154419,
+ 0.0073366518020629885,
+ 0.007377613067626953,
+ 0.007348011970520019,
+ 0.00740241289138794,
+ 0.007023850917816162,
+ 0.007007212162017822,
+ 0.0069600119590759275,
+ 0.006988972187042236
  ]
  },
  "throughput": {
  "unit": "samples/s",
- "value": 139.2573969475663
+ "value": 135.20348441855495
  },
  "energy": null,
  "efficiency": null