IlyasMoutawwakil HF staff committed on
Commit
2c65227
·
verified ·
1 Parent(s): 054c218

Upload cuda_inference_transformers_token-classification_microsoft/deberta-v3-base/benchmark.json with huggingface_hub

Browse files
cuda_inference_transformers_token-classification_microsoft/deberta-v3-base/benchmark.json CHANGED
@@ -3,7 +3,7 @@
3
  "name": "cuda_inference_transformers_token-classification_microsoft/deberta-v3-base",
4
  "backend": {
5
  "name": "pytorch",
6
- "version": "2.2.0.dev20231010+rocm5.7",
7
  "_target_": "optimum_benchmark.backends.pytorch.backend.PyTorchBackend",
8
  "task": "token-classification",
9
  "library": "transformers",
@@ -103,7 +103,7 @@
103
  "optimum_commit": null,
104
  "timm_version": "1.0.9",
105
  "timm_commit": null,
106
- "peft_version": "0.12.0",
107
  "peft_commit": null
108
  }
109
  },
@@ -111,24 +111,24 @@
111
  "load": {
112
  "memory": {
113
  "unit": "MB",
114
- "max_ram": 969.637888,
115
  "max_global_vram": 68702.69952,
116
- "max_process_vram": 48126.877696,
117
  "max_reserved": 773.849088,
118
  "max_allocated": 736.603648
119
  },
120
  "latency": {
121
  "unit": "s",
122
  "count": 1,
123
- "total": 8.56899609375,
124
- "mean": 8.56899609375,
125
  "stdev": 0.0,
126
- "p50": 8.56899609375,
127
- "p90": 8.56899609375,
128
- "p95": 8.56899609375,
129
- "p99": 8.56899609375,
130
  "values": [
131
- 8.56899609375
132
  ]
133
  },
134
  "throughput": null,
@@ -138,103 +138,99 @@
138
  "forward": {
139
  "memory": {
140
  "unit": "MB",
141
- "max_ram": 1112.162304,
142
  "max_global_vram": 68702.69952,
143
- "max_process_vram": 297867.3664,
144
  "max_reserved": 773.849088,
145
  "max_allocated": 745.086976
146
  },
147
  "latency": {
148
  "unit": "s",
149
- "count": 75,
150
- "total": 0.9998667469024656,
151
- "mean": 0.01333155662536621,
152
- "stdev": 0.0011665091222886035,
153
- "p50": 0.013509308815002442,
154
- "p90": 0.014350654411315918,
155
- "p95": 0.014611262702941894,
156
- "p99": 0.017455450820922862,
157
  "values": [
158
- 0.014736030578613281,
159
- 0.014557790756225587,
160
- 0.014082429885864257,
161
- 0.014356030464172363,
162
- 0.014385950088500977,
163
- 0.014076990127563476,
164
- 0.014465950965881347,
165
- 0.014033788681030274,
166
- 0.01361298942565918,
167
- 0.013550748825073242,
168
- 0.013390268325805664,
169
- 0.013629788398742676,
170
- 0.013582428932189942,
171
- 0.013545469284057617,
172
- 0.01634211540222168,
173
- 0.013728988647460938,
174
- 0.01434259033203125,
175
- 0.014294110298156738,
176
- 0.01425218963623047,
177
- 0.016955875396728517,
178
- 0.014115710258483887,
179
- 0.013642108917236329,
180
- 0.013666428565979005,
181
- 0.013670108795166015,
182
- 0.013667709350585937,
183
- 0.013665788650512696,
184
- 0.013629307746887207,
185
- 0.013667067527770997,
186
- 0.013636668205261231,
187
- 0.013715549468994141,
188
- 0.013266748428344726,
189
- 0.01321810817718506,
190
- 0.012209944725036621,
191
- 0.012685626029968262,
192
- 0.012313466072082519,
193
- 0.01256834602355957,
194
- 0.01242690658569336,
195
- 0.012410586357116699,
196
- 0.012629306793212891,
197
- 0.0188773193359375,
198
- 0.012596185684204102,
199
- 0.01248306655883789,
200
- 0.01215970516204834,
201
- 0.011919224739074707,
202
- 0.01236258602142334,
203
- 0.011892664909362792,
204
- 0.012564666748046875,
205
- 0.011897945404052734,
206
- 0.01256642723083496,
207
- 0.01194194507598877,
208
- 0.012525786399841308,
209
- 0.012002264976501465,
210
- 0.012231225967407227,
211
- 0.012015865325927735,
212
- 0.012480827331542968,
213
- 0.011914105415344238,
214
- 0.012137785911560058,
215
- 0.011917465209960938,
216
- 0.012121465682983399,
217
- 0.011878424644470215,
218
- 0.012042744636535645,
219
- 0.012937787055969239,
220
- 0.013539069175720215,
221
- 0.013300507545471192,
222
- 0.013470268249511719,
223
- 0.013600349426269531,
224
- 0.013551069259643554,
225
- 0.013609148979187012,
226
- 0.013529467582702636,
227
- 0.01351618766784668,
228
- 0.01357266902923584,
229
- 0.013509308815002442,
230
- 0.01348338794708252,
231
- 0.013508988380432128,
232
- 0.013480828285217286
233
  ]
234
  },
235
  "throughput": {
236
  "unit": "samples/s",
237
- "value": 75.00999531422165
238
  },
239
  "energy": null,
240
  "efficiency": null
 
3
  "name": "cuda_inference_transformers_token-classification_microsoft/deberta-v3-base",
4
  "backend": {
5
  "name": "pytorch",
6
+ "version": "2.3.1+rocm5.7",
7
  "_target_": "optimum_benchmark.backends.pytorch.backend.PyTorchBackend",
8
  "task": "token-classification",
9
  "library": "transformers",
 
103
  "optimum_commit": null,
104
  "timm_version": "1.0.9",
105
  "timm_commit": null,
106
+ "peft_version": null,
107
  "peft_commit": null
108
  }
109
  },
 
111
  "load": {
112
  "memory": {
113
  "unit": "MB",
114
+ "max_ram": 1016.344576,
115
  "max_global_vram": 68702.69952,
116
+ "max_process_vram": 46672.257024,
117
  "max_reserved": 773.849088,
118
  "max_allocated": 736.603648
119
  },
120
  "latency": {
121
  "unit": "s",
122
  "count": 1,
123
+ "total": 10.1493701171875,
124
+ "mean": 10.1493701171875,
125
  "stdev": 0.0,
126
+ "p50": 10.1493701171875,
127
+ "p90": 10.1493701171875,
128
+ "p95": 10.1493701171875,
129
+ "p99": 10.1493701171875,
130
  "values": [
131
+ 10.1493701171875
132
  ]
133
  },
134
  "throughput": null,
 
138
  "forward": {
139
  "memory": {
140
  "unit": "MB",
141
+ "max_ram": 1176.797184,
142
  "max_global_vram": 68702.69952,
143
+ "max_process_vram": 281326.788608,
144
  "max_reserved": 773.849088,
145
  "max_allocated": 745.086976
146
  },
147
  "latency": {
148
  "unit": "s",
149
+ "count": 71,
150
+ "total": 1.0068196582794189,
151
+ "mean": 0.014180558567315761,
152
+ "stdev": 0.0018349582835734584,
153
+ "p50": 0.013906750679016113,
154
+ "p90": 0.014748831748962403,
155
+ "p95": 0.015031153202056884,
156
+ "p99": 0.019625147247314418,
157
  "values": [
158
+ 0.015302593231201173,
159
+ 0.014583552360534668,
160
+ 0.015128353118896484,
161
+ 0.014851552963256836,
162
+ 0.014664192199707032,
163
+ 0.01462611198425293,
164
+ 0.015823875427246094,
165
+ 0.014595392227172852,
166
+ 0.014401310920715333,
167
+ 0.0144334716796875,
168
+ 0.01441107177734375,
169
+ 0.014417152404785156,
170
+ 0.014439231872558593,
171
+ 0.028494781494140625,
172
+ 0.01450483226776123,
173
+ 0.01442803192138672,
174
+ 0.014414112091064453,
175
+ 0.013709469795227051,
176
+ 0.013597310066223145,
177
+ 0.013906750679016113,
178
+ 0.014455072402954102,
179
+ 0.013410429000854492,
180
+ 0.01405219078063965,
181
+ 0.013598270416259765,
182
+ 0.013783551216125489,
183
+ 0.013602109909057617,
184
+ 0.013801630020141602,
185
+ 0.013759390830993652,
186
+ 0.01365266990661621,
187
+ 0.014029790878295898,
188
+ 0.013562589645385743,
189
+ 0.0135758695602417,
190
+ 0.013373469352722168,
191
+ 0.013835551261901856,
192
+ 0.013639229774475098,
193
+ 0.013203389167785644,
194
+ 0.013061948776245117,
195
+ 0.0131469087600708,
196
+ 0.013400829315185547,
197
+ 0.01284994888305664,
198
+ 0.012906268119812011,
199
+ 0.01329347038269043,
200
+ 0.013249149322509765,
201
+ 0.012933467864990235,
202
+ 0.012953948974609376,
203
+ 0.01293858814239502,
204
+ 0.013158429145812988,
205
+ 0.013124988555908204,
206
+ 0.013152189254760742,
207
+ 0.013106908798217774,
208
+ 0.013615070343017578,
209
+ 0.013397150039672852,
210
+ 0.014480192184448243,
211
+ 0.014466912269592284,
212
+ 0.014748831748962403,
213
+ 0.014525312423706055,
214
+ 0.014456192016601562,
215
+ 0.014553312301635743,
216
+ 0.014933953285217285,
217
+ 0.014615872383117675,
218
+ 0.014494911193847656,
219
+ 0.014508670806884765,
220
+ 0.014833632469177247,
221
+ 0.014527551651000977,
222
+ 0.014533632278442383,
223
+ 0.014506752014160157,
224
+ 0.014036670684814453,
225
+ 0.013565950393676758,
226
+ 0.013655710220336913,
227
+ 0.013432989120483399,
228
+ 0.013584989547729492
 
 
 
 
229
  ]
230
  },
231
  "throughput": {
232
  "unit": "samples/s",
233
+ "value": 70.51908394531529
234
  },
235
  "energy": null,
236
  "efficiency": null