IlyasMoutawwakil (HF staff) committed
Commit 8bbefff · verified · 1 Parent(s): beca6a4

Upload cuda_inference_transformers_token-classification_microsoft/deberta-v3-base/benchmark.json with huggingface_hub

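As the commit message says, the file was pushed with huggingface_hub. Below is a minimal sketch of such an upload using the library's HfApi.upload_file; the repo_id and repo_type are placeholders/assumptions, not values taken from this page.

# Sketch only: pushing the benchmark JSON with huggingface_hub.
# repo_id and repo_type below are placeholders, not taken from this commit page.
from huggingface_hub import HfApi

api = HfApi()  # authenticates via `huggingface-cli login` or the HF_TOKEN env var
api.upload_file(
    path_or_fileobj="benchmark.json",  # local copy of the results file
    path_in_repo="cuda_inference_transformers_token-classification_microsoft/deberta-v3-base/benchmark.json",
    repo_id="<namespace>/<benchmark-results-repo>",  # placeholder
    repo_type="dataset",                             # assumption; adjust to the actual repo type
    commit_message="Upload cuda_inference_transformers_token-classification_microsoft/deberta-v3-base/benchmark.json with huggingface_hub",
)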
cuda_inference_transformers_token-classification_microsoft/deberta-v3-base/benchmark.json CHANGED
@@ -85,7 +85,7 @@
  "gpu_count": 1,
  "gpu_vram_mb": 68702699520,
  "optimum_benchmark_version": "0.2.0",
- "optimum_benchmark_commit": "d7339efdbbcababd22ca2f563771622060764f8f",
+ "optimum_benchmark_commit": "c08a62a8b686d201c33b94256f220dd9ac7afa59",
  "transformers_version": "4.40.2",
  "transformers_commit": null,
  "accelerate_version": "0.30.1",
@@ -104,102 +104,100 @@
  "forward": {
  "memory": {
  "unit": "MB",
- "max_ram": 1026.781184,
- "max_global_vram": 1122.865152,
- "max_process_vram": 297342.005248,
+ "max_ram": 1026.002944,
+ "max_global_vram": 1122.861056,
+ "max_process_vram": 293400.072192,
  "max_reserved": 773.849088,
  "max_allocated": 745.087488
  },
  "latency": {
  "unit": "s",
- "count": 74,
- "total": 1.0075324335098266,
- "mean": 0.013615303155538197,
- "stdev": 0.0007567196307469012,
- "p50": 0.013767148494720459,
- "p90": 0.014350314712524414,
- "p95": 0.014479969930648803,
- "p99": 0.014990337381362913,
+ "count": 72,
+ "total": 0.9982586460113525,
+ "mean": 0.013864703416824341,
+ "stdev": 0.0008775687975349177,
+ "p50": 0.013518068313598633,
+ "p90": 0.015226274681091308,
+ "p95": 0.015443306303024292,
+ "p99": 0.015825706443786627,
  "values": [
- 0.013753548622131347,
- 0.013795469284057617,
- 0.01366475009918213,
- 0.013539790153503418,
- 0.013350510597229004,
- 0.012659632682800293,
- 0.012621393203735351,
- 0.012309075355529785,
- 0.01220731544494629,
- 0.012159794807434081,
- 0.012139155387878417,
- 0.012258834838867187,
- 0.012250194549560547,
- 0.01218603515625,
- 0.012155315399169923,
- 0.012176594734191894,
- 0.013127471923828125,
- 0.01230411434173584,
- 0.01556090259552002,
- 0.014779306411743165,
- 0.014160906791687012,
- 0.01419898796081543,
- 0.014379146575927735,
- 0.014338666915893555,
- 0.01414154815673828,
- 0.014186026573181153,
- 0.01435530662536621,
- 0.01405274772644043,
- 0.014083627700805665,
- 0.014212587356567382,
- 0.014088908195495605,
- 0.014116268157958984,
- 0.014040907859802247,
- 0.014143628120422363,
- 0.014335467338562011,
- 0.014133708000183105,
- 0.014162668228149414,
- 0.014177227973937988,
- 0.014200587272644043,
- 0.014114507675170898,
- 0.014476105690002442,
- 0.014144107818603516,
- 0.014133546829223633,
- 0.014186827659606934,
- 0.01416314697265625,
- 0.014148907661437988,
- 0.014487146377563476,
- 0.013512909889221192,
- 0.013390030860900879,
- 0.013139790534973144,
- 0.0127236328125,
- 0.01302683162689209,
- 0.013519789695739746,
- 0.013167311668395996,
- 0.013014032363891601,
- 0.012863793373107911,
- 0.013229230880737304,
- 0.013806509017944337,
- 0.014564585685729981,
- 0.013738028526306153,
- 0.014379306793212891,
- 0.014230187416076661,
- 0.013572589874267578,
- 0.013582829475402833,
- 0.013586990356445313,
- 0.013421070098876954,
- 0.012927472114562988,
- 0.013614830017089843,
- 0.013293391227722167,
- 0.013891307830810547,
- 0.013451149940490723,
- 0.013516909599304199,
- 0.01378074836730957,
- 0.014224747657775878
+ 0.014052627563476562,
+ 0.013443188667297363,
+ 0.013963027954101563,
+ 0.013929587364196777,
+ 0.014079828262329102,
+ 0.014433587074279785,
+ 0.015525266647338868,
+ 0.015425585746765136,
+ 0.015281427383422851,
+ 0.014912787437438966,
+ 0.014777907371520996,
+ 0.014775827407836914,
+ 0.014732787132263184,
+ 0.01488798713684082,
+ 0.01493102741241455,
+ 0.014955986976623535,
+ 0.014879507064819336,
+ 0.014914067268371582,
+ 0.015256306648254395,
+ 0.01551390552520752,
+ 0.015449906349182129,
+ 0.01656126594543457,
+ 0.015437906265258788,
+ 0.01454238796234131,
+ 0.014739347457885741,
+ 0.014297587394714355,
+ 0.01328446865081787,
+ 0.013039347648620605,
+ 0.013294548988342285,
+ 0.013050868034362792,
+ 0.013018709182739258,
+ 0.013044628143310547,
+ 0.013322229385375977,
+ 0.01306478786468506,
+ 0.013261268615722657,
+ 0.013448947906494141,
+ 0.013012788772583007,
+ 0.013073908805847167,
+ 0.01343742847442627,
+ 0.013072628974914551,
+ 0.01309310817718506,
+ 0.013475668907165528,
+ 0.01312958812713623,
+ 0.013103029251098634,
+ 0.013540307998657226,
+ 0.013056468009948731,
+ 0.013020468711853026,
+ 0.013529427528381347,
+ 0.013089109420776367,
+ 0.01303646755218506,
+ 0.013506709098815918,
+ 0.013137747764587402,
+ 0.013062548637390137,
+ 0.01307726764678955,
+ 0.01308094882965088,
+ 0.013655187606811524,
+ 0.013132147789001465,
+ 0.013093428611755371,
+ 0.012960948944091798,
+ 0.013092787742614746,
+ 0.013567828178405761,
+ 0.013003028869628907,
+ 0.013061107635498047,
+ 0.01327998924255371,
+ 0.012995508193969727,
+ 0.014052147865295411,
+ 0.01462110710144043,
+ 0.013949587821960449,
+ 0.014319348335266113,
+ 0.013757747650146484,
+ 0.01387950611114502,
+ 0.013771187782287598
  ]
  },
  "throughput": {
  "unit": "samples/s",
- "value": 73.44676711023047
+ "value": 72.12559619461707
  },
  "energy": null,
  "efficiency": null