Upload cuda_inference_transformers_token-classification_microsoft/deberta-v3-base/benchmark.json with huggingface_hub
cuda_inference_transformers_token-classification_microsoft/deberta-v3-base/benchmark.json CHANGED
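For reference, publishing a result file like this to the Hub is typically a single huggingface_hub call. The sketch below is illustrative only: the local path, repo_id, and repo_type are placeholders, since the target repository is not shown in this commit view; only HfApi.upload_file and its parameters are the real API.

from huggingface_hub import HfApi

api = HfApi()
api.upload_file(
    # Local copy of the benchmark result (placeholder path).
    path_or_fileobj="benchmark.json",
    # Destination path inside the repo, matching the file changed in this commit.
    path_in_repo="cuda_inference_transformers_token-classification_microsoft/deberta-v3-base/benchmark.json",
    # Placeholder: the target repository is not named in this view.
    repo_id="<namespace>/<benchmark-results-repo>",
    # Assumption: benchmark results are commonly stored in a dataset repo.
    repo_type="dataset",
    commit_message="Upload cuda_inference_transformers_token-classification_microsoft/deberta-v3-base/benchmark.json with huggingface_hub",
)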
@@ -104,7 +104,7 @@
 "forward": {
 "memory": {
 "unit": "MB",
-"max_ram": 975.
+"max_ram": 975.343616,
 "max_global_vram": 1434.976256,
 "max_process_vram": 0.0,
 "max_reserved": 794.820608,
@@ -113,100 +113,100 @@
 "latency": {
 "unit": "s",
 "count": 70,
-"total": 1.
-"mean": 0.
-"stdev": 0.
-"p50": 0.
-"p90": 0.
-"p95": 0.
-"p99": 0.
+"total": 1.0002298612594605,
+"mean": 0.014288998017992292,
+"stdev": 0.000889031349371158,
+"p50": 0.014049791812896728,
+"p90": 0.014649036884307862,
+"p95": 0.01495434284210205,
+"p99": 0.018140692634582525,
 "values": [
-0.
-0.
-0.
-0.
-0.
-0.
-0.
-0.
-0.
-0.
-0.
-0.
-0.
-0.
-0.
-0.
-0.
-0.
-0.
-0.
-0.
-0.
-0.014628864288330079,
-0.014562303543090821,
-0.014603263854980468,
-0.014492671966552734,
-0.014515199661254884,
-0.014527487754821777,
-0.0147424955368042,
-0.014451711654663087,
-0.01460531234741211,
-0.01447532844543457,
-0.014741503715515136,
-0.015213567733764649,
-0.014315520286560059,
-0.014144512176513671,
-0.014108672142028808,
-0.014047231674194336,
-0.013963264465332031,
-0.014060544013977052,
-0.013789183616638183,
-0.014467071533203125,
-0.014636032104492188,
-0.014631936073303223,
-0.014593024253845215,
-0.01409331226348877,
-0.01400115203857422,
-0.014032896041870118,
-0.014055520057678223,
-0.014087200164794922,
-0.014029824256896972,
-0.014056447982788087,
-0.014120960235595703,
-0.014030847549438476,
-0.013842432022094727,
-0.013847552299499511,
-0.014013440132141113,
-0.013996031761169434,
-0.014008319854736329,
-0.01408512020111084,
-0.014047231674194336,
-0.014025728225708007,
-0.013988863945007325,
+0.020537343978881836,
+0.017063936233520507,
+0.014871552467346191,
+0.014012415885925293,
+0.013897727966308594,
+0.013969408035278321,
+0.013910016059875489,
+0.01377894401550293,
+0.013865983963012696,
+0.014011360168457032,
+0.0138854398727417,
+0.013856767654418945,
+0.014018560409545898,
+0.013937664031982423,
+0.013912063598632812,
+0.01384447956085205,
+0.013883392333984374,
+0.013739007949829102,
+0.013792256355285644,
+0.013818880081176758,
+0.01445683193206787,
+0.014073856353759765,
 0.014048255920410157,
-0.
-0.
-0.
-0.
+0.013834239959716797,
+0.014051327705383301,
+0.013915136337280273,
+0.013891551971435547,
+0.013816831588745117,
+0.013915136337280273,
+0.014014464378356933,
+0.01377996826171875,
+0.013799424171447755,
+0.013814784049987794,
+0.013924384117126464,
+0.014282784461975097,
+0.014167039871215821,
 0.013958144187927245,
-0.
+0.01397657585144043,
+0.013914112091064454,
+0.013733887672424316,
+0.01386291217803955,
+0.01360588836669922,
+0.01400115203857422,
+0.014861344337463379,
+0.015022080421447754,
+0.015094783782958985,
+0.014729215621948242,
+0.014640128135681153,
+0.014458880424499512,
+0.014367744445800782,
+0.014402560234069824,
+0.014361599922180175,
+0.01439641571044922,
+0.014361599922180175,
+0.01439027214050293,
+0.014411775588989258,
+0.014408703804016113,
+0.014427136421203614,
+0.01439027214050293,
+0.014363648414611817,
+0.014386176109313965,
+0.014395392417907715,
+0.01437491226196289,
+0.014343168258666992,
+0.014361599922180175,
+0.01437279987335205,
+0.014366656303405763,
+0.014369791984558105,
+0.014367744445800782,
+0.01435756778717041
 ]
 },
 "throughput": {
 "unit": "samples/s",
-"value": 69.
+"value": 69.98391340952172
 },
 "energy": {
 "unit": "kWh",
-"cpu": 1.
-"ram": 8.
-"gpu": 3.
-"total": 5.
+"cpu": 1.6454273843329793e-07,
+"ram": 8.970683184213145e-08,
+"gpu": 3.339234026027187e-07,
+"total": 5.88172972878148e-07
 },
 "efficiency": {
 "unit": "samples/kWh",
-"value":
+"value": 1700180.1274659561
 }
 }
 }
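As a sanity check, the derived fields in the new file line up with one another: throughput matches count divided by total latency, the energy total matches the sum of its cpu, ram, and gpu components, and efficiency matches one divided by the total energy. The short script below reproduces those numbers; it assumes the "forward" section sits directly under the JSON root, which may not match the file's actual nesting, so adjust the lookup accordingly.

import json

# Load the uploaded result file (assumes a local copy named benchmark.json).
with open("benchmark.json") as f:
    report = json.load(f)

# Assumption: "forward" is reachable at the top level; the real file may nest it deeper.
forward = report["forward"]
latency, energy = forward["latency"], forward["energy"]

# 70 recorded latencies totalling ~1.0002 s.
assert len(latency["values"]) == latency["count"] == 70

# Throughput: count / total latency -> ~69.98391340952172 samples/s.
print(latency["count"] / latency["total"])

# Energy total: cpu + ram + gpu -> ~5.88172972878148e-07 kWh.
print(energy["cpu"] + energy["ram"] + energy["gpu"])

# Efficiency: 1 / total energy -> ~1700180.1274659561 samples/kWh.
print(1 / energy["total"])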