IlyasMoutawwakil committed (verified)
Commit f4516b7
1 parent: fb4ac26

Upload cuda_inference_transformers_token-classification_microsoft/deberta-v3-base/benchmark.json with huggingface_hub

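The commit message indicates the file was pushed programmatically via huggingface_hub. A minimal sketch of such an upload is shown below; the repo_id, repo_type, and token handling are illustrative assumptions, not details taken from this commit:

```python
# Minimal sketch: pushing a benchmark.json to a Hub repo with huggingface_hub.
# The repo_id and repo_type below are assumptions for illustration only.
from huggingface_hub import HfApi

api = HfApi()  # picks up a cached login or the HF_TOKEN environment variable
api.upload_file(
    path_or_fileobj="benchmark.json",
    path_in_repo="cuda_inference_transformers_token-classification_microsoft/deberta-v3-base/benchmark.json",
    repo_id="some-user/benchmark-results",  # hypothetical repo id
    repo_type="dataset",                    # assumption: results live in a dataset repo
    commit_message="Upload benchmark.json with huggingface_hub",
)
```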
cuda_inference_transformers_token-classification_microsoft/deberta-v3-base/benchmark.json CHANGED
@@ -3,7 +3,7 @@
  "name": "cuda_inference_transformers_token-classification_microsoft/deberta-v3-base",
  "backend": {
  "name": "pytorch",
- "version": "2.4.0+cu121",
+ "version": "2.4.0+cu124",
  "_target_": "optimum_benchmark.backends.pytorch.backend.PyTorchBackend",
  "task": "token-classification",
  "library": "transformers",
@@ -104,7 +104,7 @@
  "load": {
  "memory": {
  "unit": "MB",
- "max_ram": 751.202304,
+ "max_ram": 773.009408,
  "max_global_vram": 1403.518976,
  "max_process_vram": 0.0,
  "max_reserved": 773.849088,
@@ -113,31 +113,31 @@
  "latency": {
  "unit": "s",
  "count": 1,
- "total": 7.4032177734375,
- "mean": 7.4032177734375,
+ "total": 7.44810693359375,
+ "mean": 7.44810693359375,
  "stdev": 0.0,
- "p50": 7.4032177734375,
- "p90": 7.4032177734375,
- "p95": 7.4032177734375,
- "p99": 7.4032177734375,
+ "p50": 7.44810693359375,
+ "p90": 7.44810693359375,
+ "p95": 7.44810693359375,
+ "p99": 7.44810693359375,
  "values": [
- 7.4032177734375
+ 7.44810693359375
  ]
  },
  "throughput": null,
  "energy": {
  "unit": "kWh",
- "cpu": 1.084165284027863e-06,
- "ram": 5.777204644630876e-07,
- "gpu": 1.6552791019999041e-06,
- "total": 3.3171648504908543e-06
+ "cpu": 1.1741812166666441e-06,
+ "ram": 6.102007100753228e-07,
+ "gpu": 1.6413902020001939e-06,
+ "total": 3.425772128742161e-06
  },
  "efficiency": null
  },
  "forward": {
  "memory": {
  "unit": "MB",
- "max_ram": 1034.207232,
+ "max_ram": 1160.474624,
  "max_global_vram": 1434.976256,
  "max_process_vram": 0.0,
  "max_reserved": 794.820608,
@@ -145,101 +145,103 @@
  },
  "latency": {
  "unit": "s",
- "count": 70,
- "total": 1.0015548772811889,
- "mean": 0.014307926818302698,
- "stdev": 0.00047496604955924647,
- "p50": 0.014332416057586669,
- "p90": 0.014924902725219727,
- "p95": 0.015053670692443847,
- "p99": 0.015539293546676637,
+ "count": 72,
+ "total": 1.003815077781677,
+ "mean": 0.013941876080301072,
+ "stdev": 0.00048217115905604334,
+ "p50": 0.013852159976959228,
+ "p90": 0.014111129760742186,
+ "p95": 0.014679910850524903,
+ "p99": 0.016192235984802246,
  "values": [
- 0.015364095687866211,
- 0.015890432357788087,
- 0.014955519676208496,
- 0.014837759971618653,
- 0.014420991897583007,
- 0.014461952209472656,
- 0.014333951950073242,
- 0.014834688186645508,
- 0.014832672119140624,
- 0.014548992156982422,
- 0.014412799835205077,
- 0.014657535552978516,
- 0.014650367736816406,
- 0.014462976455688477,
- 0.014498815536499024,
- 0.01447321605682373,
- 0.014330880165100097,
- 0.014358528137207031,
- 0.014674880027770997,
- 0.014603263854980468,
- 0.014693375587463378,
- 0.014735360145568848,
- 0.014477312088012695,
- 0.015049728393554687,
- 0.014511103630065919,
- 0.014438400268554688,
- 0.014420991897583007,
- 0.014458880424499512,
- 0.014620672225952149,
- 0.014348320007324219,
- 0.014536735534667968,
- 0.014839808464050292,
- 0.014922752380371093,
- 0.014944255828857422,
- 0.015381535530090333,
- 0.014256128311157227,
- 0.01407692813873291,
+ 0.016364543914794923,
+ 0.016121856689453123,
+ 0.014867456436157226,
+ 0.015255552291870117,
+ 0.013983743667602539,
+ 0.013454336166381836,
+ 0.013618176460266113,
+ 0.013451264381408692,
+ 0.013429759979248047,
+ 0.013752320289611816,
+ 0.013468671798706054,
+ 0.013660160064697266,
+ 0.013445119857788086,
+ 0.01337548828125,
+ 0.013477888107299805,
+ 0.013519935607910157,
+ 0.014008319854736329,
+ 0.013850624084472657,
+ 0.013873151779174805,
+ 0.014002240180969239,
+ 0.013946880340576171,
+ 0.01390182399749756,
+ 0.013788160324096679,
+ 0.013993984222412109,
+ 0.013886431694030761,
+ 0.014003199577331543,
+ 0.013800512313842774,
+ 0.014116864204406738,
+ 0.01405951976776123,
+ 0.013836288452148437,
+ 0.013883392333984374,
+ 0.013988863945007325,
+ 0.013836288452148437,
+ 0.01396019172668457,
+ 0.013794303894042969,
+ 0.013947903633117676,
+ 0.013881343841552735,
+ 0.01387110424041748,
+ 0.013760512351989745,
+ 0.013830080032348632,
+ 0.013889535903930664,
+ 0.013853695869445801,
+ 0.013925375938415528,
+ 0.013815808296203613,
+ 0.013819904327392578,
+ 0.013993984222412109,
+ 0.01376460838317871,
  0.013928447723388672,
- 0.013797375679016113,
- 0.013799424171447755,
+ 0.013818880081176758,
+ 0.013835264205932616,
+ 0.013920255661010742,
+ 0.013783040046691895,
+ 0.013902848243713378,
  0.013773823738098144,
- 0.013868032455444336,
- 0.013856767654418945,
- 0.01400115203857422,
- 0.014078911781311034,
- 0.015056896209716796,
- 0.014229503631591797,
- 0.014142463684082032,
- 0.014125056266784668,
- 0.013965312004089356,
- 0.013896703720092773,
- 0.013825023651123047,
- 0.01385267162322998,
- 0.013794303894042969,
- 0.013808639526367187,
- 0.013821951866149903,
- 0.013757439613342285,
- 0.013831135749816894,
- 0.013767680168151856,
- 0.013910016059875489,
+ 0.013789183616638183,
+ 0.01389363193511963,
+ 0.013720576286315917,
+ 0.014045184135437011,
+ 0.013820992469787598,
+ 0.01376972770690918,
+ 0.01376460838317871,
+ 0.014486528396606446,
+ 0.014526464462280274,
+ 0.013873151779174805,
+ 0.013741056442260742,
+ 0.013819904327392578,
  0.01384447956085205,
- 0.013894656181335448,
- 0.013851648330688476,
- 0.013841376304626465,
- 0.013797375679016113,
- 0.013812735557556152,
- 0.01388748836517334,
- 0.01386291217803955,
- 0.013770751953125,
- 0.013786111831665039
+ 0.013950943946838379,
+ 0.013810720443725586,
+ 0.013747200012207032,
+ 0.01389363193511963,
+ 0.014223360061645507
  ]
  },
  "throughput": {
  "unit": "samples/s",
- "value": 69.89132756262075
+ "value": 71.72635836384548
  },
  "energy": {
  "unit": "kWh",
- "cpu": 1.646529046392772e-07,
- "ram": 9.000423442123114e-08,
- "gpu": 3.4302110775000374e-07,
- "total": 5.97678246810512e-07
+ "cpu": 1.7318404730274888e-07,
+ "ram": 9.462063404866304e-08,
+ "gpu": 3.5393345544927224e-07,
+ "total": 6.217381368006841e-07
  },
  "efficiency": {
  "unit": "samples/kWh",
- "value": 1673141.0342211102
+ "value": 1608394.1788511816
  }
  }
  }
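For readers comparing the two runs, the derived figures in the new file line up with the raw measurements. A minimal sanity-check sketch is given below, assuming the file is available locally as benchmark.json; the formulas (throughput = count / total latency, efficiency = 1 / total energy) are inferred from the numbers in this diff, not taken from optimum-benchmark's source:

```python
import json

# Recompute the derived forward-pass metrics from the raw measurements.
# The formulas are an assumption inferred from the values in this file.
with open("benchmark.json") as f:
    data = json.load(f)

# The exact nesting of the "forward" section is assumed here; adjust the
# lookup if the report is wrapped under another key (e.g. "report").
forward = data.get("report", data)["forward"]

throughput = forward["latency"]["count"] / forward["latency"]["total"]
efficiency = 1.0 / forward["energy"]["total"]

print(f"throughput ~ {throughput:.2f} samples/s")    # ~71.73 in the new file
print(f"efficiency ~ {efficiency:.0f} samples/kWh")  # ~1608394 in the new file
```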