Upload cuda_inference_transformers_multiple-choice_FacebookAI/roberta-base/benchmark.json with huggingface_hub
cuda_inference_transformers_multiple-choice_FacebookAI/roberta-base/benchmark.json
CHANGED
@@ -85,7 +85,7 @@
     "gpu_count": 1,
     "gpu_vram_mb": 68702699520,
     "optimum_benchmark_version": "0.2.0",
-    "optimum_benchmark_commit": "
+    "optimum_benchmark_commit": "c08a62a8b686d201c33b94256f220dd9ac7afa59",
     "transformers_version": "4.40.2",
     "transformers_commit": null,
     "accelerate_version": "0.30.1",
@@ -104,158 +104,151 @@
     "forward": {
         "memory": {
             "unit": "MB",
-            "max_ram":
-            "max_global_vram": 898.
-            "max_process_vram":
+            "max_ram": 1004.630016,
+            "max_global_vram": 898.461696,
+            "max_process_vram": 206006.80448,
             "max_reserved": 555.74528,
             "max_allocated": 499.507712
         },
         "latency": {
             "unit": "s",
-            "count":
-            "total":
-            "mean": 0.
-            "stdev": 0.
-            "p50": 0.
-            "p90": 0.
-            "p95": 0.
-            "p99": 0.
+            "count": 123,
+            "total": 1.002975475788117,
+            "mean": 0.008154272160878995,
+            "stdev": 0.00011117940415605982,
+            "p50": 0.008132152557373047,
+            "p90": 0.008305816841125488,
+            "p95": 0.008370792770385743,
+            "p99": 0.008439938526153564,
             "values": [
-                [123 removed values, truncated to "0." in this diff view]
-                0.007937565803527832,
-                0.008007644653320312,
-                0.007970526218414306,
-                0.007813405990600587,
-                0.007600286960601807,
-                0.007563967227935791,
-                0.008408283233642578
+                0.008315993309020995,
+                0.008108312606811524,
+                0.008124794006347656,
+                0.008110233306884766,
+                0.008152632713317872,
+                0.008226713180541993,
+                0.00816015338897705,
+                0.008158713340759278,
+                0.008185752868652344,
+                0.008147353172302247,
+                0.008118073463439941,
+                0.008103034019470216,
+                0.008072632789611817,
+                0.008076152801513671,
+                0.008031673431396484,
+                0.008033272743225097,
+                0.00803679370880127,
+                0.008044472694396973,
+                0.008027032852172851,
+                0.008046712875366211,
+                0.008043032646179199,
+                0.008062873840332031,
+                0.008024152755737304,
+                0.008017593383789063,
+                0.008043032646179199,
+                0.008050872802734374,
+                0.008055193901062012,
+                0.008127992630004882,
+                0.008185592651367188,
+                0.008151033401489257,
+                0.00821711254119873,
+                0.008226232528686523,
+                0.00817231273651123,
+                0.008145112991333008,
+                0.008089913368225097,
+                0.008025914192199708,
+                0.008037432670593262,
+                0.007996633052825927,
+                0.008018233299255372,
+                0.00805311393737793,
+                0.008078554153442382,
+                0.00803791332244873,
+                0.008260473251342774,
+                0.008121752738952636,
+                0.008195512771606446,
+                0.008086393356323242,
+                0.007994873046875,
+                0.008021114349365234,
+                0.008026073455810547,
+                0.008032313346862793,
+                0.008027832984924316,
+                0.00824879264831543,
+                0.008110234260559082,
+                0.00829775333404541,
+                0.008094232559204101,
+                0.008073432922363281,
+                0.008131193161010743,
+                0.008148153305053711,
+                0.008262072563171387,
+                0.008097593307495116,
+                0.008128632545471191,
+                0.008204954147338867,
+                0.008233593940734864,
+                0.0081444730758667,
+                0.008093913078308106,
+                0.008059352874755859,
+                0.0080396728515625,
+                0.008132152557373047,
+                0.008075834274291992,
+                0.008091193199157715,
+                0.008128313064575196,
+                0.008051833152770997,
+                0.008049753189086914,
+                0.008140632629394531,
+                0.008187353134155273,
+                0.008062073707580567,
+                0.008075352668762206,
+                0.00804399299621582,
+                0.008051353454589844,
+                0.0080679931640625,
+                0.008099034309387207,
+                0.008068312644958496,
+                0.008036473274230956,
+                0.008071513175964355,
+                0.00809647274017334,
+                0.008205912590026855,
+                0.008209273338317872,
+                0.008300312995910645,
+                0.008126394271850586,
+                0.008208473205566407,
+                0.008177753448486327,
+                0.008392152786254882,
+                0.00837103271484375,
+                0.008409111976623536,
+                0.008404953002929687,
+                0.008350393295288087,
+                0.0083071928024292,
+                0.008346073150634766,
+                0.008391512870788573,
+                0.008230552673339844,
+                0.008285272598266601,
+                0.008262392997741699,
+                0.008143353462219238,
+                0.008174233436584473,
+                0.008287833213806153,
+                0.008172952651977539,
+                0.0081812744140625,
+                0.008252794265747071,
+                0.008171353340148926,
+                0.008143033027648926,
+                0.008448633193969726,
+                0.008534071922302246,
+                0.008368633270263671,
+                0.008337593078613282,
+                0.00829023265838623,
+                0.008249432563781738,
+                0.00828639316558838,
+                0.00827103328704834,
+                0.008178072929382324,
+                0.008142073631286622,
+                0.008242072105407715,
+                0.00818271255493164,
+                0.008134552955627442
             ]
         },
         "throughput": {
             "unit": "samples/s",
-            "value":
+            "value": 122.63510222256359
         },
         "energy": null,
         "efficiency": null
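For reference, the reported throughput (122.63510222256359 samples/s) is simply the number of recorded forward latencies divided by their sum (123 / 1.002975475788117 s). Below is a minimal sketch, not part of the upload, for sanity-checking the summary fields against the raw `values` array. The local file path is illustrative, and since the full nesting of the JSON is not visible in this diff, the snippet searches for the `forward` section rather than assuming an exact key path.

import json
import statistics

def find_key(node, key):
    # Depth-first search for the first dict entry named `key`; the exact
    # nesting of benchmark.json is not shown in the diff, so we locate it.
    if isinstance(node, dict):
        if key in node:
            return node[key]
        for child in node.values():
            found = find_key(child, key)
            if found is not None:
                return found
    elif isinstance(node, list):
        for child in node:
            found = find_key(child, key)
            if found is not None:
                return found
    return None

with open("benchmark.json") as f:       # illustrative local path
    report = json.load(f)

latency = find_key(report, "forward")["latency"]
values = latency["values"]              # per-forward-call timings, in seconds

print(len(values))                      # expected to equal latency["count"] (123 in this run)
print(sum(values))                      # expected to match latency["total"]
print(statistics.mean(values))          # latency["mean"]
print(statistics.stdev(values))         # close to latency["stdev"] (sample vs. population definitions may differ slightly)
print(len(values) / sum(values))        # samples/s; matches the reported throughput value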