Upload cuda_inference_transformers_fill-mask_google-bert/bert-base-uncased/benchmark.json with huggingface_hub
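As the commit title indicates, the file was pushed with the huggingface_hub client. A minimal sketch of how such an upload can be reproduced is shown below; the repo_id and local file path are placeholders for illustration, not values taken from this commit.

```python
from huggingface_hub import HfApi

api = HfApi()  # picks up the token from `huggingface-cli login` or the HF_TOKEN env var

# Hypothetical repo_id and local path, shown only to illustrate the call.
api.upload_file(
    path_or_fileobj="benchmark.json",
    path_in_repo="cuda_inference_transformers_fill-mask_google-bert/bert-base-uncased/benchmark.json",
    repo_id="<namespace>/<dataset-name>",
    repo_type="dataset",
    commit_message="Upload benchmark.json with huggingface_hub",
)
```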
cuda_inference_transformers_fill-mask_google-bert/bert-base-uncased/benchmark.json
CHANGED
@@ -85,7 +85,7 @@
    "gpu_count": 1,
    "gpu_vram_mb": 68702699520,
    "optimum_benchmark_version": "0.2.1",
-   "optimum_benchmark_commit": "
+   "optimum_benchmark_commit": "c594845efb520077430f4fe3f536bc1756e2b290",
    "transformers_version": "4.42.3",
    "transformers_commit": null,
    "accelerate_version": "0.31.0",
@@ -104,156 +104,170 @@
    "forward": {
        "memory": {
            "unit": "MB",
-           "max_ram":
-           "max_global_vram": 938.
-           "max_process_vram":
+           "max_ram": 1011.843072,
+           "max_global_vram": 938.9056,
+           "max_process_vram": 211362.787328,
            "max_reserved": 589.299712,
            "max_allocated": 439.700992
        },
        "latency": {
            "unit": "s",
-           "count":
-           "total":
-           "mean": 0.
-           "stdev": 0.
-           "p50": 0.
-           "p90": 0.
-           "p95": 0.
-           "p99": 0.
+           "count": 142,
+           "total": 1.0042363939285277,
+           "mean": 0.007072087281186815,
+           "stdev": 0.0002904386701146157,
+           "p50": 0.006987179040908814,
+           "p90": 0.007315242004394531,
+           "p95": 0.00767704119682312,
+           "p99": 0.008351909317970278,
            "values": [
-               [128 removed values, truncated in this view]
+               0.00686382007598877,
+               0.00703805923461914,
+               0.0069929399490356444,
+               0.00711677885055542,
+               0.007170538902282714,
+               0.007101099014282227,
+               0.007117898941040039,
+               0.007109259128570557,
+               0.00704957914352417,
+               0.007020140171051025,
+               0.006965098857879638,
+               0.007028459072113037,
+               0.006934700012207032,
+               0.006985898971557617,
+               0.007033259868621826,
+               0.006975019931793213,
+               0.0073468580245971676,
+               0.007083178997039795,
+               0.007087018966674805,
+               0.007027659893035889,
+               0.007003018856048584,
+               0.007040618896484375,
+               0.007061738967895508,
+               0.0070494189262390134,
+               0.007101259231567383,
+               0.0070540599822998045,
+               0.0070959792137145995,
+               0.007031979084014893,
+               0.007053739070892334,
+               0.007087338924407959,
+               0.007029579162597656,
+               0.006997738838195801,
+               0.007156778812408447,
+               0.007167018890380859,
+               0.00702125883102417,
+               0.007051019191741943,
+               0.007144779205322266,
+               0.0071790189743041995,
+               0.007321897983551025,
+               0.00717133903503418,
+               0.007317257881164551,
+               0.0072970991134643556,
+               0.007268939018249512,
+               0.008660293579101563,
+               0.008941574096679688,
+               0.006990219116210937,
+               0.006911980152130127,
+               0.006946858882904053,
+               0.006948460102081299,
+               0.006980618953704834,
+               0.00702557897567749,
+               0.007062539100646973,
+               0.006928939819335938,
+               0.006950539112091064,
+               0.006950379848480225,
+               0.007024299144744873,
+               0.006922220230102539,
+               0.006984939098358154,
+               0.006964778900146485,
+               0.006939340114593506,
+               0.0069894189834594726,
+               0.006935180187225342,
+               0.006967658996582031,
+               0.006845739841461182,
+               0.006951018810272217,
+               0.006986859798431396,
+               0.006975499153137207,
+               0.006963180065155029,
+               0.006984138965606689,
+               0.0069159789085388185,
+               0.006973740100860596,
+               0.006981258869171143,
+               0.006925739765167236,
+               0.006960138797760009,
+               0.006944779872894287,
+               0.006964299201965332,
+               0.0068902201652526855,
+               0.006882538795471191,
+               0.006990859985351563,
+               0.0069020590782165525,
+               0.007021420001983643,
+               0.006917259216308594,
+               0.006986539840698242,
+               0.006889898777008057,
+               0.006991980075836182,
+               0.007033099174499511,
+               0.0077580571174621586,
+               0.006987339019775391,
+               0.007012939929962158,
+               0.006972620010375977,
+               0.0069870190620422365,
+               0.00696861982345581,
+               0.006909739017486572,
+               0.006897739887237549,
+               0.0070457391738891605,
+               0.006871018886566162,
+               0.0069977397918701174,
+               0.006897579193115235,
+               0.007881257057189941,
+               0.007784136772155762,
+               0.00790813684463501,
+               0.007680137157440186,
+               0.007754056930541992,
+               0.007618217945098877,
+               0.0074580578804016116,
+               0.007355817794799805,
+               0.007371017932891846,
+               0.0071911787986755375,
+               0.007161259174346924,
+               0.007213098049163818,
+               0.007275339126586914,
+               0.00702509880065918,
+               0.007066858768463135,
+               0.007127018928527832,
+               0.006963500022888184,
+               0.006953098773956299,
+               0.007005578994750976,
+               0.006967179775238037,
+               0.006951818943023682,
+               0.006940139770507813,
+               0.0069540591239929195,
+               0.006983980178833008,
+               0.006879659175872803,
+               0.006956620216369629,
+               0.006948139190673828,
+               0.006958700180053711,
+               0.00688365888595581,
+               0.006971340179443359,
+               0.006872138977050781,
+               0.006905419826507568,
+               0.006857900142669677,
+               0.006888938903808594,
+               0.006832779884338379,
+               0.006839660167694092,
+               0.006889420032501221,
+               0.006862059116363526,
+               0.006875820159912109,
+               0.006869259834289551,
+               0.006861578941345215,
+               0.006833899974822998,
+               0.006832940101623535,
+               0.006866058826446533
            ]
        },
        "throughput": {
            "unit": "samples/s",
-           "value":
+           "value": 141.4009697900933
        },
        "energy": null,
        "efficiency": null
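As a quick sanity check on the new values, the summary statistics are internally consistent: the mean latency is total / count and the reported samples/s throughput is count / total. A minimal sketch, assuming the file has been downloaded locally and that the forward/latency keys sit where the diff suggests:

```python
import json

# Load the benchmark report; the key path below is assumed from the diff and may
# need adjusting if "forward" is nested deeper in the actual file.
with open("benchmark.json") as f:
    report = json.load(f)

latency = report["forward"]["latency"]
count, total = latency["count"], latency["total"]

print(total / count)  # ~0.007072 s, the reported mean latency
print(count / total)  # ~141.401 samples/s, the reported throughput value
```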