Commit a27edd0 (verified) · committed by IlyasMoutawwakil · 1 parent: 10e17a4

Upload cuda_inference_transformers_token-classification_microsoft/deberta-v3-base/benchmark.json with huggingface_hub
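The commit message states that the file was uploaded with huggingface_hub. Below is a minimal sketch of such an upload using HfApi.upload_file; the repo_id, local path, and repo_type are placeholders (the target repository is not shown on this page), so treat it as an illustration rather than the exact command behind this commit.

```python
# Hedged sketch: re-creating an upload like the one in this commit with the
# huggingface_hub client. The repo_id below is a placeholder, not the real
# target repository of this commit.
from huggingface_hub import HfApi

api = HfApi()  # picks up the token from `huggingface-cli login` or HF_TOKEN

api.upload_file(
    path_or_fileobj="benchmark.json",  # local results file produced by the benchmark run
    path_in_repo=(
        "cuda_inference_transformers_token-classification_microsoft/"
        "deberta-v3-base/benchmark.json"
    ),
    repo_id="username/benchmark-results",  # placeholder repo id (assumption)
    repo_type="dataset",                   # assumption: results stored in a dataset repo
    commit_message=(
        "Upload cuda_inference_transformers_token-classification_microsoft/"
        "deberta-v3-base/benchmark.json with huggingface_hub"
    ),
)
```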
cuda_inference_transformers_token-classification_microsoft/deberta-v3-base/benchmark.json CHANGED
@@ -85,7 +85,7 @@
  "gpu_count": 1,
  "gpu_vram_mb": 68702699520,
  "optimum_benchmark_version": "0.2.1",
- "optimum_benchmark_commit": "5e9beea10942a9d4b3bee05550004817630e3d7e",
+ "optimum_benchmark_commit": "079be6729111d58c45eb7f4a60e984bbf86c5a81",
  "transformers_version": "4.41.0",
  "transformers_commit": null,
  "accelerate_version": "0.30.1",
@@ -104,95 +104,101 @@
  "forward": {
  "memory": {
  "unit": "MB",
- "max_ram": 1027.50208,
- "max_global_vram": 2103.222272,
- "max_process_vram": 255002.054656,
+ "max_ram": 1026.90816,
+ "max_global_vram": 2103.226368,
+ "max_process_vram": 239249.035264,
  "max_reserved": 773.849088,
  "max_allocated": 745.087488
  },
  "latency": {
  "unit": "s",
- "count": 67,
- "total": 0.9967311601638791,
- "mean": 0.014876584480057903,
- "stdev": 0.000902131377261893,
- "p50": 0.014948933601379395,
- "p90": 0.015715843391418457,
- "p95": 0.015844258880615233,
- "p99": 0.01678210786819459,
+ "count": 73,
+ "total": 1.0020606813430786,
+ "mean": 0.013726858648535322,
+ "stdev": 0.0004930121338117998,
+ "p50": 0.013566572189331055,
+ "p90": 0.014304172325134278,
+ "p95": 0.014471691894531248,
+ "p99": 0.01531950138092041,
  "values": [
- 0.01832956886291504,
- 0.015480611801147461,
- 0.01535885238647461,
- 0.015757251739501953,
- 0.015707011222839354,
- 0.015546051979064942,
- 0.015515811920166015,
- 0.015616452217102051,
- 0.015391652107238769,
- 0.015874691009521484,
- 0.015488612174987792,
- 0.015882691383361815,
- 0.01565261173248291,
- 0.015271812438964845,
- 0.015459491729736327,
- 0.01512461280822754,
- 0.015043652534484864,
- 0.015773250579833985,
- 0.015390372276306153,
- 0.015156453132629394,
- 0.015609251022338868,
- 0.015345573425292969,
- 0.01511853313446045,
- 0.015418532371520997,
- 0.015330211639404297,
- 0.015635492324829102,
- 0.015984930992126466,
- 0.01572909164428711,
- 0.015442051887512208,
- 0.015521251678466796,
- 0.014948933601379395,
- 0.014939653396606445,
- 0.014653413772583009,
- 0.015212132453918456,
- 0.014813093185424805,
- 0.01488205337524414,
- 0.014769413948059081,
- 0.01522909164428711,
- 0.014807653427124024,
- 0.01480637264251709,
- 0.014766694068908692,
- 0.01507069206237793,
- 0.014813094139099122,
- 0.014716293334960938,
- 0.014788932800292968,
- 0.013960615158081054,
- 0.014756772994995117,
- 0.014581093788146973,
- 0.014853572845458985,
- 0.014856773376464844,
- 0.014653733253479004,
- 0.014519813537597657,
- 0.013929254531860351,
- 0.014377894401550293,
- 0.013926215171813965,
- 0.01414349365234375,
- 0.013623174667358398,
- 0.013277095794677734,
- 0.013235495567321778,
- 0.01372285556793213,
- 0.013269736289978027,
- 0.013273736000061036,
- 0.013274215698242187,
- 0.013456456184387207,
- 0.01323533535003662,
- 0.01331053638458252,
- 0.0133193359375
+ 0.013991531372070312,
+ 0.01466545295715332,
+ 0.014287531852722168,
+ 0.01393169116973877,
+ 0.013573931694030763,
+ 0.013688970565795898,
+ 0.013878252029418945,
+ 0.014358092308044433,
+ 0.013518731117248535,
+ 0.014184012413024902,
+ 0.014308332443237304,
+ 0.015744013786315916,
+ 0.013727371215820312,
+ 0.013429611206054688,
+ 0.0134451322555542,
+ 0.013461291313171387,
+ 0.01341377067565918,
+ 0.013485132217407226,
+ 0.01342337131500244,
+ 0.01351089096069336,
+ 0.013401290893554688,
+ 0.013544331550598145,
+ 0.013427210807800292,
+ 0.013457290649414063,
+ 0.013431371688842774,
+ 0.013450410842895508,
+ 0.013438250541687012,
+ 0.013449932098388672,
+ 0.013520971298217773,
+ 0.013155851364135742,
+ 0.01310417079925537,
+ 0.012565610885620117,
+ 0.01334929084777832,
+ 0.013547531127929687,
+ 0.013566572189331055,
+ 0.013523051261901856,
+ 0.013503690719604493,
+ 0.013571051597595215,
+ 0.013502731323242187,
+ 0.013519371032714844,
+ 0.013582411766052247,
+ 0.013568490982055663,
+ 0.013708492279052735,
+ 0.013660011291503906,
+ 0.013529611587524413,
+ 0.013538411140441894,
+ 0.013630571365356446,
+ 0.013516331672668457,
+ 0.01355393123626709,
+ 0.013576811790466308,
+ 0.013516651153564454,
+ 0.013544330596923828,
+ 0.01358913230895996,
+ 0.013532490730285644,
+ 0.015154413223266602,
+ 0.014629132270812988,
+ 0.014072491645812988,
+ 0.014042572021484375,
+ 0.01402705192565918,
+ 0.01399009132385254,
+ 0.014354572296142579,
+ 0.014043051719665528,
+ 0.01403313159942627,
+ 0.014132652282714844,
+ 0.013982571601867676,
+ 0.01410145092010498,
+ 0.014049132347106934,
+ 0.014055691719055175,
+ 0.014366731643676758,
+ 0.014085452079772949,
+ 0.013530730247497558,
+ 0.012595531463623046,
+ 0.012709450721740723
  ]
  },
  "throughput": {
  "unit": "samples/s",
- "value": 67.21973053293935
+ "value": 72.84987961223754
  },
  "energy": null,
  "efficiency": null