Upload cuda_inference_transformers_text-classification_FacebookAI/roberta-base/benchmark.json with huggingface_hub
cuda_inference_transformers_text-classification_FacebookAI/roberta-base/benchmark.json
CHANGED
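The commit title indicates the file was pushed with the huggingface_hub client. As a rough sketch of how such an upload is typically done (the repo_id and local path below are placeholders, not taken from this commit, and the exact call used to produce it may differ):

# Minimal sketch of uploading a benchmark result with huggingface_hub.
# repo_id and the local path are placeholders, not taken from this commit.
from huggingface_hub import HfApi

api = HfApi()
api.upload_file(
    path_or_fileobj="benchmark.json",  # local result file
    path_in_repo="cuda_inference_transformers_text-classification_FacebookAI/roberta-base/benchmark.json",
    repo_id="<namespace>/<benchmark-dataset>",  # placeholder dataset repo
    repo_type="dataset",
    commit_message="Upload benchmark.json with huggingface_hub",
)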
@@ -3,7 +3,7 @@
     "name": "cuda_inference_transformers_text-classification_FacebookAI/roberta-base",
     "backend": {
         "name": "pytorch",
-        "version": "2.3.
+        "version": "2.3.1+cu121",
         "_target_": "optimum_benchmark.backends.pytorch.backend.PyTorchBackend",
         "task": "text-classification",
         "library": "transformers",
@@ -73,10 +73,10 @@
     "environment": {
         "cpu": " AMD EPYC 7R32",
         "cpu_count": 16,
-        "cpu_ram_mb": 66697.
+        "cpu_ram_mb": 66697.293824,
         "system": "Linux",
         "machine": "x86_64",
-        "platform": "Linux-5.10.
+        "platform": "Linux-5.10.219-208.866.amzn2.x86_64-x86_64-with-glibc2.35",
         "processor": "x86_64",
         "python_version": "3.10.12",
         "gpu": [
@@ -86,15 +86,15 @@
         "gpu_vram_mb": 24146608128,
         "optimum_benchmark_version": "0.2.1",
         "optimum_benchmark_commit": null,
-        "transformers_version": "4.
+        "transformers_version": "4.42.3",
         "transformers_commit": null,
-        "accelerate_version": "0.
+        "accelerate_version": "0.31.0",
         "accelerate_commit": null,
-        "diffusers_version": "0.
+        "diffusers_version": "0.29.2",
         "diffusers_commit": null,
         "optimum_version": null,
         "optimum_commit": null,
-        "timm_version": "1.0.
+        "timm_version": "1.0.7",
         "timm_commit": null,
         "peft_version": null,
         "peft_commit": null
@@ -104,7 +104,7 @@
     "forward": {
         "memory": {
             "unit": "MB",
-            "max_ram":
+            "max_ram": 903.794688,
             "max_global_vram": 1195.900928,
             "max_process_vram": 0.0,
             "max_reserved": 555.74528,
@@ -112,185 +112,166 @@
         },
         "latency": {
             "unit": "s",
-            "count":
-            "total":
-            "mean": 0.
-            "stdev": 0.
-            "p50": 0.
-            "p90": 0.
-            "p95": 0.
-            "p99": 0.
+            "count": 135,
+            "total": 1.0010643186569208,
+            "mean": 0.00741529124931053,
+            "stdev": 0.0004882032942976823,
+            "p50": 0.007348159790039063,
+            "p90": 0.007455743885040283,
+            "p95": 0.007651430416107178,
+            "p99": 0.010179071712493895,
             "values": [
-                0.
-                0.
-                0.
-                0.
-                0.
-                0.
-                0.
-                0.
-                0.
-                0.
-                0.
-                0.
-                0.
-                0.
-                0.
-                0.
-                0.
-                0.
-                0.
-                0.
-                0.
-                0.
-                0.
-                0.
-                0.
-                0.
-                0.
-                0.
-                0.
-                0.
-                0.
-                0.
-                0.
-                0.
-                0.
-                0.
-                0.
-                0.
-                0.
-                0.
-                0.
-                0.
-                0.
-                0.
-                0.
-                0.
-                0.
-                0.
-                0.
-                0.
-                0.
-                0.
-                0.
-                0.
-                0.
-                0.
-                0.
-                0.
-                0.
-                0.
-                0.
-                0.
-                0.
-                0.
-                0.
-                0.
-                0.
-                0.
-                0.
-                0.
-                0.
-                0.
-                0.
-                0.
-                0.
-                0.
-                0.
-                0.
-                0.
-                0.
-                0.
-                0.
-                0.
-                0.
-                0.
-                0.
-                0.
-                0.
-                0.
-                0.
-                0.
-                0.
-                0.
-                0.
-                0.
-                0.
-                0.
-                0.
-                0.
-                0.
-                0.
-                0.
-                0.
-                0.
-                0.
-                0.
-                0.
-                0.
-                0.
-                0.
-                0.
-                0.
-                0.
-                0.
-                0.
-                0.
-                0.
-                0.
-                0.
-                0.
-                0.
-                0.
-                0.
-                0.
-                0.
-                0.
-                0.
-                0.
-                0.
-                0.
-                0.
-                0.
-                0.
-                0.
-                0.
-                0.00657203197479248,
-                0.0065474557876586915,
-                0.006535168170928955,
-                0.006569983959197998,
-                0.0065372161865234375,
-                0.006589439868927002,
-                0.006562816143035889,
-                0.006586368083953857,
-                0.006582272052764892,
-                0.006543360233306885,
-                0.006608895778656006,
-                0.006620160102844238,
-                0.006508543968200684,
-                0.006557695865631104,
-                0.006498303890228272,
-                0.006525951862335205,
-                0.00658022403717041,
-                0.006549503803253174,
-                0.006505472183227539
+                0.01073151969909668,
+                0.010239999771118164,
+                0.010060799598693848,
+                0.00898252773284912,
+                0.007477248191833496,
+                0.007445504188537597,
+                0.0074301438331604,
+                0.008047552108764648,
+                0.007101439952850342,
+                0.006986752033233643,
+                0.006918144226074219,
+                0.00692633581161499,
+                0.0069283838272094726,
+                0.006910912036895752,
+                0.006908959865570068,
+                0.007078911781311035,
+                0.006974463939666748,
+                0.0069621758460998535,
+                0.007419904232025146,
+                0.007371776103973389,
+                0.007348159790039063,
+                0.0076308479309082035,
+                0.007361536026000977,
+                0.007337984085083008,
+                0.007354368209838867,
+                0.0073400321006774905,
+                0.007329792022705078,
+                0.007358463764190673,
+                0.007349247932434082,
+                0.007319551944732666,
+                0.00733900785446167,
+                0.007320576190948487,
+                0.00739737606048584,
+                0.007364511966705322,
+                0.0073175039291381834,
+                0.0073359360694885255,
+                0.007319551944732666,
+                0.007331840038299561,
+                0.007301152229309082,
+                0.007316480159759522,
+                0.007334911823272705,
+                0.007929855823516846,
+                0.007400447845458984,
+                0.007478271961212158,
+                0.007402495861053467,
+                0.007404543876647949,
+                0.007300096035003662,
+                0.007320608139038086,
+                0.007398399829864502,
+                0.0073431038856506346,
+                0.007327744007110596,
+                0.007329792022705078,
+                0.00729702377319336,
+                0.007362559795379638,
+                0.007373824119567871,
+                0.007402495861053467,
+                0.007457791805267334,
+                0.007375872135162354,
+                0.007384064197540283,
+                0.007444479942321777,
+                0.0074106879234313965,
+                0.007386112213134765,
+                0.007479296207427978,
+                0.0074301438331604,
+                0.007372799873352051,
+                0.007400447845458984,
+                0.007366655826568603,
+                0.007342144012451172,
+                0.007367680072784424,
+                0.007413760185241699,
+                0.007361536026000977,
+                0.007357439994812012,
+                0.007408639907836914,
+                0.0073697280883789065,
+                0.007408639907836914,
+                0.007476223945617676,
+                0.007368703842163086,
+                0.0073820161819458,
+                0.007377855777740479,
+                0.007320576190948487,
+                0.007318560123443603,
+                0.0072509760856628415,
+                0.007235583782196045,
+                0.0072468481063842774,
+                0.007177216053009033,
+                0.007198719978332519,
+                0.007264256000518799,
+                0.007286784172058106,
+                0.00733081579208374,
+                0.007316480159759522,
+                0.007393280029296875,
+                0.007336959838867187,
+                0.007373824119567871,
+                0.007385087966918945,
+                0.00733900785446167,
+                0.007359488010406494,
+                0.00734822416305542,
+                0.007344128131866455,
+                0.007344128131866455,
+                0.0074414081573486324,
+                0.007385087966918945,
+                0.007358463764190673,
+                0.00734822416305542,
+                0.007354368209838867,
+                0.007292928218841553,
+                0.007699456214904785,
+                0.007452672004699707,
+                0.007505887985229492,
+                0.007390207767486572,
+                0.007325695991516113,
+                0.007321599960327148,
+                0.007295040130615235,
+                0.007322624206542969,
+                0.007375872135162354,
+                0.007402495861053467,
+                0.007332863807678222,
+                0.00734822416305542,
+                0.007316480159759522,
+                0.007319551944732666,
+                0.007359488010406494,
+                0.007364607810974121,
+                0.007327744007110596,
+                0.007319551944732666,
+                0.007312384128570557,
+                0.007284736156463623,
+                0.0073471999168395995,
+                0.0073400321006774905,
+                0.00724070405960083,
+                0.0071905279159545895,
+                0.007146495819091797,
+                0.007233535766601563,
+                0.007325695991516113,
+                0.007321568012237549,
+                0.007346176147460938,
+                0.0073175039291381834
             ]
         },
         "throughput": {
             "unit": "samples/s",
-            "value":
+            "value": 134.8564697432457
         },
         "energy": {
             "unit": "kWh",
-            "cpu":
-            "ram": 4.
-            "gpu": 1.
-            "total": 2.
+            "cpu": 8.461303535907988e-08,
+            "ram": 4.626100755093887e-08,
+            "gpu": 1.5189694691428479e-07,
+            "total": 2.8277098982430353e-07
         },
         "efficiency": {
             "unit": "samples/kWh",
-            "value":
+            "value": 3536430.6664602985
         }
     }
 }
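In the updated file, the reported throughput is consistent with the latency block (135 samples over 1.0010643186569208 s is about 134.856 samples/s), and the efficiency value is the reciprocal of the total per-sample energy (1 / 2.8277098982430353e-07 kWh is about 3,536,430.67 samples/kWh). A small sketch that re-derives both from a downloaded copy of benchmark.json; the structure surrounding the keys visible in this diff is not shown in the commit, so the helper below searches for the "forward" section rather than assuming its exact nesting:

# Sanity-check sketch: re-derive throughput and efficiency from benchmark.json.
# Only the keys visible in this diff ("forward" -> "latency"/"energy") are
# relied on; the enclosing layout is treated as unknown.
import json

def find_key(obj, key):
    """Depth-first search for the first occurrence of `key` in nested JSON."""
    if isinstance(obj, dict):
        if key in obj:
            return obj[key]
        for value in obj.values():
            found = find_key(value, key)
            if found is not None:
                return found
    elif isinstance(obj, list):
        for value in obj:
            found = find_key(value, key)
            if found is not None:
                return found
    return None

with open("benchmark.json") as f:
    report = json.load(f)

forward = find_key(report, "forward")
latency = forward["latency"]
energy = forward["energy"]

# 135 samples / ~1.001 s -> ~134.86 samples/s, matching the "throughput" value.
print(latency["count"] / latency["total"])
# 1 / ~2.83e-07 kWh -> ~3.54e6 samples/kWh, matching the "efficiency" value.
print(1.0 / energy["total"])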