IlyasMoutawwakil (HF staff) committed
Commit bad9693 · verified · 1 parent: 28e147d

Upload cuda_training_transformers_multiple-choice_FacebookAI/roberta-base/benchmark.json with huggingface_hub
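The report was pushed with the huggingface_hub Python client. A minimal sketch of such an upload is shown below; the repo_id, repo_type, and local file path are illustrative placeholders, not values taken from this commit.

from huggingface_hub import HfApi

api = HfApi()

# Push the local benchmark report to the Hub repository.
# repo_id, repo_type and the local path are hypothetical placeholders.
api.upload_file(
    path_or_fileobj="benchmark.json",
    path_in_repo="cuda_training_transformers_multiple-choice_FacebookAI/roberta-base/benchmark.json",
    repo_id="<namespace>/<benchmark-repo>",
    repo_type="dataset",
    commit_message="Upload cuda_training_transformers_multiple-choice_FacebookAI/roberta-base/benchmark.json with huggingface_hub",
)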

cuda_training_transformers_multiple-choice_FacebookAI/roberta-base/benchmark.json CHANGED
@@ -3,7 +3,7 @@
     "name": "cuda_training_transformers_multiple-choice_FacebookAI/roberta-base",
     "backend": {
         "name": "pytorch",
-        "version": "2.2.0.dev20231010+rocm5.7",
+        "version": "2.3.1+rocm5.7",
         "_target_": "optimum_benchmark.backends.pytorch.backend.PyTorchBackend",
         "task": "multiple-choice",
         "library": "transformers",
@@ -109,7 +109,7 @@
         "optimum_commit": null,
         "timm_version": "1.0.9",
         "timm_commit": null,
-        "peft_version": "0.12.0",
+        "peft_version": null,
         "peft_commit": null
     }
 },
@@ -117,33 +117,33 @@
     "overall": {
         "memory": {
             "unit": "MB",
-            "max_ram": 1207.611392,
+            "max_ram": 1276.567552,
             "max_global_vram": 68702.69952,
-            "max_process_vram": 296277.200896,
+            "max_process_vram": 271030.484992,
             "max_reserved": 2707.423232,
             "max_allocated": 2497.88416
         },
         "latency": {
             "unit": "s",
             "count": 5,
-            "total": 0.7374819030761719,
-            "mean": 0.14749638061523437,
-            "stdev": 0.20873067383411237,
-            "p50": 0.04319273376464844,
-            "p90": 0.35642812500000004,
-            "p95": 0.46069266967773426,
-            "p99": 0.5441043054199218,
+            "total": 0.7427469482421876,
+            "mean": 0.1485493896484375,
+            "stdev": 0.20893911527938605,
+            "p50": 0.04373689651489258,
+            "p90": 0.35881962432861336,
+            "p95": 0.46261604995727534,
+            "p99": 0.5456531904602051,
             "values": [
-                0.5649572143554688,
-                0.04363449096679688,
-                0.042604091644287106,
-                0.04319273376464844,
-                0.0430933723449707
+                0.5664124755859375,
+                0.04373689651489258,
+                0.04256953430175781,
+                0.042597694396972655,
+                0.047430347442626954
             ]
         },
         "throughput": {
             "unit": "samples/s",
-            "value": 67.7982738172162
+            "value": 67.3176781383375
         },
         "energy": null,
         "efficiency": null
@@ -151,30 +151,30 @@
     "warmup": {
         "memory": {
             "unit": "MB",
-            "max_ram": 1207.611392,
+            "max_ram": 1276.567552,
             "max_global_vram": 68702.69952,
-            "max_process_vram": 296277.200896,
+            "max_process_vram": 271030.484992,
             "max_reserved": 2707.423232,
             "max_allocated": 2497.88416
         },
         "latency": {
             "unit": "s",
             "count": 2,
-            "total": 0.6085917053222656,
-            "mean": 0.3042958526611328,
-            "stdev": 0.26066136169433596,
-            "p50": 0.3042958526611328,
-            "p90": 0.5128249420166016,
-            "p95": 0.5388910781860351,
-            "p99": 0.559743987121582,
+            "total": 0.6101493721008301,
+            "mean": 0.30507468605041504,
+            "stdev": 0.2613377895355225,
+            "p50": 0.30507468605041504,
+            "p90": 0.5141449176788331,
+            "p95": 0.5402786966323853,
+            "p99": 0.5611857197952271,
             "values": [
-                0.5649572143554688,
-                0.04363449096679688
+                0.5664124755859375,
+                0.04373689651489258
             ]
         },
         "throughput": {
             "unit": "samples/s",
-            "value": 13.145101929648854
+            "value": 13.111543444607465
         },
         "energy": null,
         "efficiency": null
@@ -182,31 +182,31 @@
     "train": {
         "memory": {
             "unit": "MB",
-            "max_ram": 1207.611392,
+            "max_ram": 1276.567552,
            "max_global_vram": 68702.69952,
-            "max_process_vram": 296277.200896,
+            "max_process_vram": 271030.484992,
             "max_reserved": 2707.423232,
             "max_allocated": 2497.88416
         },
         "latency": {
             "unit": "s",
             "count": 3,
-            "total": 0.12889019775390625,
-            "mean": 0.04296339925130208,
-            "stdev": 0.0002572866627897311,
-            "p50": 0.0430933723449707,
-            "p90": 0.04317286148071289,
-            "p95": 0.043182797622680666,
-            "p99": 0.04319074653625488,
+            "total": 0.13259757614135742,
+            "mean": 0.04419919204711914,
+            "stdev": 0.0022848008141017557,
+            "p50": 0.042597694396972655,
+            "p90": 0.0464638168334961,
+            "p95": 0.04694708213806152,
+            "p99": 0.047333694381713864,
             "values": [
-                0.042604091644287106,
-                0.04319273376464844,
-                0.0430933723449707
+                0.04256953430175781,
+                0.042597694396972655,
+                0.047430347442626954
             ]
         },
         "throughput": {
             "unit": "samples/s",
-            "value": 139.65375423170593
+            "value": 135.74908775716125
         },
         "energy": null,
         "efficiency": null