IlyasMoutawwakil (HF staff) committed
Commit 5779e4b · verified · parent: 4cd962f

Upload cuda_inference_transformers_token-classification_microsoft/deberta-v3-base/benchmark.json with huggingface_hub
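
For reference, an upload like this is normally produced with the huggingface_hub Python client. A minimal sketch (the target repo_id and repo_type below are placeholders, not stated on this page):

```python
from huggingface_hub import HfApi

api = HfApi()  # picks up the token from `huggingface-cli login` / HF_TOKEN by default
api.upload_file(
    path_or_fileobj="benchmark.json",  # local benchmark report
    path_in_repo="cuda_inference_transformers_token-classification_microsoft/deberta-v3-base/benchmark.json",
    repo_id="<namespace>/<benchmark-repo>",  # placeholder: the actual target repo is not shown on this page
    repo_type="dataset",                     # assumption: benchmark dumps are usually stored in a dataset repo
    commit_message="Upload cuda_inference_transformers_token-classification_microsoft/deberta-v3-base/benchmark.json with huggingface_hub",
)
```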

cuda_inference_transformers_token-classification_microsoft/deberta-v3-base/benchmark.json CHANGED
@@ -104,7 +104,7 @@
  "forward": {
  "memory": {
  "unit": "MB",
- "max_ram": 976.34304,
+ "max_ram": 976.191488,
  "max_global_vram": 1434.976256,
  "max_process_vram": 0.0,
  "max_reserved": 794.820608,
@@ -112,101 +112,102 @@
  },
  "latency": {
  "unit": "s",
- "count": 70,
- "total": 1.0102527704238893,
- "mean": 0.014432182434626989,
- "stdev": 0.0008683820003625622,
- "p50": 0.014217728137969971,
- "p90": 0.014784483242034912,
- "p95": 0.01512724471092224,
- "p99": 0.01820702688217164,
+ "count": 71,
+ "total": 1.004907585144043,
+ "mean": 0.014153627959775253,
+ "stdev": 0.0004936119670674675,
+ "p50": 0.014081024169921874,
+ "p90": 0.014718976020812988,
+ "p95": 0.015069695949554443,
+ "p99": 0.015791308975219725,
  "values": [
- 0.020771839141845702,
- 0.017054719924926756,
- 0.014927871704101562,
- 0.014776288032531738,
- 0.014513216018676757,
- 0.014129152297973633,
- 0.014304256439208985,
- 0.01529036808013916,
- 0.014493696212768555,
- 0.015335424423217774,
- 0.014858240127563477,
- 0.014622783660888672,
- 0.014568448066711426,
- 0.01430835247039795,
- 0.014183423995971679,
- 0.014898176193237305,
- 0.013733887672424316,
- 0.014010368347167968,
- 0.014233599662780762,
- 0.014350336074829101,
- 0.014224384307861328,
- 0.014247936248779297,
- 0.014193696022033691,
- 0.014325759887695312,
- 0.014243840217590332,
- 0.014220288276672363,
- 0.014243840217590332,
- 0.014270463943481445,
- 0.014206975936889648,
- 0.01418239974975586,
- 0.014322688102722168,
- 0.014191616058349609,
- 0.014194687843322755,
- 0.014279680252075195,
- 0.014237695693969727,
- 0.014215167999267577,
- 0.01411683177947998,
- 0.014202879905700684,
- 0.014234623908996581,
- 0.014394368171691894,
- 0.014310400009155273,
- 0.01426636791229248,
- 0.014276608467102051,
- 0.014172160148620605,
- 0.014187520027160644,
- 0.014205951690673829,
- 0.01418233585357666,
- 0.014194720268249512,
- 0.014212096214294433,
- 0.014181376457214356,
- 0.01417420768737793,
- 0.014231552124023437,
- 0.014155776023864745,
- 0.014311424255371094,
+ 0.015667200088500977,
+ 0.016080896377563478,
+ 0.015572992324829102,
+ 0.015316991806030274,
+ 0.01439846420288086,
+ 0.013988927841186523,
+ 0.014002240180969239,
+ 0.01425715160369873,
+ 0.01408614444732666,
+ 0.014180352210998535,
  0.014154751777648926,
- 0.014186495780944825,
- 0.014186495780944825,
- 0.014166015625,
- 0.0141844482421875,
- 0.014193663597106934,
- 0.014211071968078613,
- 0.014272512435913086,
+ 0.014177248001098633,
+ 0.014109696388244629,
+ 0.01398681640625,
+ 0.01396735954284668,
+ 0.01397555160522461,
+ 0.014142463684082032,
+ 0.014422016143798828,
  0.0141844482421875,
- 0.01417728042602539,
- 0.014252032279968262,
- 0.014141440391540527,
- 0.014203904151916504,
- 0.014146559715270996,
- 0.014206879615783692,
- 0.014208000183105468
+ 0.014819328308105468,
+ 0.014718976020812988,
+ 0.014503935813903808,
+ 0.01477014446258545,
+ 0.014553088188171387,
+ 0.014038016319274902,
+ 0.01406771183013916,
+ 0.014395392417907715,
+ 0.01437491226196289,
+ 0.014155776023864745,
+ 0.014113792419433594,
+ 0.0139683837890625,
+ 0.01397555160522461,
+ 0.014024703979492188,
+ 0.013801471710205078,
+ 0.013965312004089356,
+ 0.01460428810119629,
+ 0.014822400093078614,
+ 0.014631936073303223,
+ 0.014284799575805664,
+ 0.014132224082946777,
+ 0.014101504325866699,
+ 0.014139391899108887,
+ 0.014078975677490235,
+ 0.014139391899108887,
+ 0.014040063858032227,
+ 0.01408512020111084,
+ 0.014024703979492188,
+ 0.014072832107543945,
+ 0.014045184135437011,
+ 0.014029824256896972,
+ 0.014073856353759765,
+ 0.01409126377105713,
+ 0.014310400009155273,
+ 0.014081024169921874,
+ 0.014140416145324707,
+ 0.014036992073059081,
+ 0.014054400444030762,
+ 0.013623295783996582,
+ 0.013562879562377929,
+ 0.013586432456970214,
+ 0.013569024085998535,
+ 0.01356390380859375,
+ 0.013650943756103515,
+ 0.01354751968383789,
+ 0.013549568176269532,
+ 0.013531135559082032,
+ 0.013574144363403321,
+ 0.01353932762145996,
+ 0.013566975593566894,
+ 0.013544447898864746,
+ 0.013558783531188966
  ]
  },
  "throughput": {
  "unit": "samples/s",
- "value": 69.28958974359347
+ "value": 70.65326309565361
  },
  "energy": {
  "unit": "kWh",
- "cpu": 1.6740216802170205e-07,
- "ram": 9.151575718035709e-08,
- "gpu": 3.5668651383098745e-07,
- "total": 6.156044390330466e-07
+ "cpu": 1.6689085064919344e-07,
+ "ram": 9.123992078923466e-08,
+ "gpu": 3.508922994929543e-07,
+ "total": 6.090230709313824e-07
  },
  "efficiency": {
  "unit": "samples/kWh",
- "value": 1624419.7354566485
+ "value": 1641973.9213995528
  }
  }
  }
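
The changed summary fields are consistent with being derived from the raw measurements in the same diff: the throughput value equals count / total latency, and the efficiency value equals the reciprocal of the total energy (which suggests the energy figures are already normalized per sample). A small sketch that checks this against the new ("+") numbers; the formulas are inferred from the values here, not taken from the benchmarking tool's source:

```python
# Recompute the derived metrics from the new values in the diff above.
count = 71                                 # "latency"."count"
total_latency_s = 1.004907585144043        # "latency"."total"
total_energy_kwh = 6.090230709313824e-07   # "energy"."total"

throughput = count / total_latency_s       # ~70.653263 samples/s, matches "throughput"."value"
efficiency = 1.0 / total_energy_kwh        # ~1641973.92 samples/kWh, matches "efficiency"."value"

print(f"{throughput:.8f} samples/s, {efficiency:.4f} samples/kWh")
```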