Upload cuda_inference_transformers_token-classification_microsoft/deberta-v3-base/benchmark.json with huggingface_hub
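The commit title indicates the file was pushed with the huggingface_hub client. Below is a minimal sketch of such an upload; the repo_id, repo_type, and token handling are assumptions for illustration, and only the in-repo path comes from the commit title.

# Hedged sketch: upload a benchmark result file with huggingface_hub.
# The repo_id below is a placeholder; this commit does not specify the target repository.
from huggingface_hub import HfApi

api = HfApi()  # picks up the token from `huggingface-cli login` by default
api.upload_file(
    path_or_fileobj="benchmark.json",  # local result produced by the benchmark run
    path_in_repo="cuda_inference_transformers_token-classification_microsoft/deberta-v3-base/benchmark.json",
    repo_id="<username>/<benchmark-results>",  # hypothetical target repository
    repo_type="dataset",  # assumption: benchmark artifacts are commonly stored in dataset repos
    commit_message="Upload cuda_inference_transformers_token-classification_microsoft/deberta-v3-base/benchmark.json with huggingface_hub",
)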
cuda_inference_transformers_token-classification_microsoft/deberta-v3-base/benchmark.json
CHANGED
@@ -73,7 +73,7 @@
     "environment": {
         "cpu": " AMD EPYC 7R32",
         "cpu_count": 16,
-        "cpu_ram_mb": 66697.
+        "cpu_ram_mb": 66697.29792,
         "system": "Linux",
         "machine": "x86_64",
         "platform": "Linux-5.10.215-203.850.amzn2.x86_64-x86_64-with-glibc2.35",
@@ -104,7 +104,7 @@
     "forward": {
         "memory": {
             "unit": "MB",
-            "max_ram": 959.
+            "max_ram": 959.975424,
             "max_global_vram": 1434.976256,
             "max_process_vram": 0.0,
             "max_reserved": 794.820608,
@@ -112,102 +112,106 @@
         },
         "latency": {
             "unit": "s",
-            "count":
-            "total": 1.
-            "mean": 0.
-            "stdev": 0.
-            "p50": 0.
-            "p90": 0.
-            "p95": 0.
-            "p99": 0.
+            "count": 75,
+            "total": 1.0082426242828368,
+            "mean": 0.013443234990437824,
+            "stdev": 0.0007456770533109025,
+            "p50": 0.013674495697021484,
+            "p90": 0.014222949981689454,
+            "p95": 0.014849126434326171,
+            "p99": 0.015321067581176762,
             "values": [
-                0.
-                0.
-                0.
-                0.
-                0.
-                0.
-                0.
-                0.
-                0.
-                0.
-                0.
-                0.
-                0.
-                0.
-                0.013911040306091308,
-                0.014451711654663087,
-                0.01435142421722412,
-                0.014237759590148925,
-                0.014254079818725587,
-                0.014326784133911133,
-                0.014495776176452637,
-                0.014680064201354981,
-                0.014665727615356445,
-                0.014452735900878906,
-                0.01447116756439209,
-                0.014387200355529785,
-                0.014425087928771972,
-                0.014427136421203614,
-                0.014561280250549317,
-                0.014337023735046387,
-                0.014312447547912598,
-                0.014306303977966308,
-                0.014476287841796874,
-                0.01395199966430664,
-                0.013934528350830078,
-                0.014520319938659668,
-                0.014698495864868164,
-                0.014575615882873535,
-                0.014436351776123046,
-                0.014706687927246094,
-                0.014144512176513671,
-                0.01420083236694336,
-                0.014492671966552734,
-                0.014668800354003907,
-                0.014135295867919923,
+                0.014947327613830566,
+                0.0150763521194458,
+                0.014740480422973632,
+                0.015088640213012695,
+                0.014284799575805664,
+                0.013293567657470704,
+                0.013551615715026855,
+                0.013715456008911133,
+                0.013684736251831055,
+                0.014000127792358399,
+                0.013731840133666993,
+                0.013726719856262207,
+                0.013833215713500976,
+                0.013806591987609864,
                 0.01395404815673828,
-                0.
-                0.
-                0.
-                0.013817855834960938,
-                0.014183423995971679,
-                0.014565376281738282,
-                0.013883392333984374,
-                0.013847552299499511,
-                0.01380352020263672,
-                0.013775872230529786,
+                0.013799424171447755,
+                0.013836288452148437,
+                0.013591551780700683,
                 0.013813759803771973,
-                0.
-                0.
-                0.
-                0.
-                0.
-                0.
-                0.
-                0.
-                0.
-                0.
-                0.
-                0.
-                0.
-                0.
+                0.01377280044555664,
+                0.013683712005615235,
+                0.014130175590515137,
+                0.013972479820251465,
+                0.013706239700317382,
+                0.013917183876037598,
+                0.01376972770690918,
+                0.013674495697021484,
+                0.01375641632080078,
+                0.013767680168151856,
+                0.013882368087768555,
+                0.013604864120483399,
+                0.013857791900634766,
+                0.013698047637939453,
+                0.013813759803771973,
+                0.014121983528137207,
+                0.014346240043640136,
+                0.013826047897338867,
+                0.01372054386138916,
+                0.013690879821777344,
+                0.013595647811889648,
+                0.01369600009918213,
+                0.014807040214538575,
+                0.01598259162902832,
+                0.013386752128601074,
+                0.013181952476501465,
+                0.013023232460021973,
+                0.012668928146362305,
+                0.012690431594848632,
+                0.01273036766052246,
+                0.012975104331970215,
+                0.012660736083984376,
+                0.012609536170959473,
+                0.01266483211517334,
+                0.012606464385986327,
+                0.012621824264526366,
+                0.012969984054565429,
+                0.012698623657226562,
+                0.012641280174255372,
+                0.012712960243225097,
+                0.01260540771484375,
+                0.012668928146362305,
+                0.012727295875549317,
+                0.012659711837768555,
+                0.012614656448364257,
+                0.012609536170959473,
+                0.012731391906738282,
+                0.012596223831176758,
+                0.012621824264526366,
+                0.012692480087280274,
+                0.012637184143066407,
+                0.012582912445068359,
+                0.012596223831176758,
+                0.012613632202148438,
+                0.012593152046203614,
+                0.012577792167663575
             ]
         },
         "throughput": {
             "unit": "samples/s",
-            "value":
+            "value": 74.38685708546345
         },
         "energy": {
             "unit": "kWh",
-            "cpu": 1.
-            "ram": 8.
-            "gpu": 3.
-            "total": 5.
+            "cpu": 1.496383630567127e-07,
+            "ram": 8.168073093202111e-08,
+            "gpu": 3.4314263562488375e-07,
+            "total": 5.744617296136175e-07
         },
         "efficiency": {
             "unit": "samples/kWh",
-            "value":
+            "value": 1740759.9992302344
         }
     }
 }
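The aggregate fields filled in by this change are consistent with simple reductions over the 75 recorded latencies: mean = total / count and throughput = count / total (75 / 1.0082 s ≈ 74.387 samples/s), and the efficiency value matches, to within rounding, the reciprocal of the reported total energy. The sketch below shows those checks; it assumes the file has been downloaded locally as benchmark.json and that the "forward" section sits at the top level of the parsed dict, which is an assumption about the surrounding file layout rather than something this diff shows.

# Hedged sketch: sanity-check the aggregate statistics added in this diff
# against the raw per-iteration latencies.
import json

with open("benchmark.json") as f:       # local copy of the uploaded file
    report = json.load(f)

latency = report["forward"]["latency"]  # assumed location of the "forward" section
values = latency["values"]              # 75 per-iteration forward latencies, in seconds

count = len(values)                     # -> 75, matches "count"
total = sum(values)                     # -> ~1.00824 s, matches "total"
mean = total / count                    # -> ~0.0134432 s, matches "mean"
throughput = count / total              # -> ~74.387 samples/s, matches "throughput.value"

energy_total = report["forward"]["energy"]["total"]  # kWh
efficiency = 1.0 / energy_total         # -> ~1.74e6 samples/kWh, close to "efficiency.value"

print(count, total, mean, throughput, efficiency)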