IlyasMoutawwakil (HF staff) committed
Commit e038850
Parent: abbbf2e

Upload cpu_training_transformers_text-classification_FacebookAI/roberta-base/benchmark.json with huggingface_hub
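
As context for the commit message above, a file like this is typically pushed with huggingface_hub's upload_file helper. The snippet below is a minimal sketch, not the exact command used here: the repo_id, repo_type, and token handling are assumptions (placeholders), since the target repository is not shown in this commit.

# Sketch: upload a benchmark.json to the Hub with huggingface_hub.
# repo_id and repo_type are placeholders, not taken from this commit.
from huggingface_hub import HfApi

api = HfApi()  # picks up the token from `huggingface-cli login` by default
api.upload_file(
    path_or_fileobj="benchmark.json",  # local result file produced by the benchmark run
    path_in_repo="cpu_training_transformers_text-classification_FacebookAI/roberta-base/benchmark.json",
    repo_id="<username>/benchmark-results",  # placeholder repository
    repo_type="dataset",                     # placeholder; a model repo would also work
    commit_message="Upload benchmark.json with huggingface_hub",
)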

cpu_training_transformers_text-classification_FacebookAI/roberta-base/benchmark.json CHANGED
@@ -6,17 +6,19 @@
   "version": "2.3.0+cpu",
   "_target_": "optimum_benchmark.backends.pytorch.backend.PyTorchBackend",
   "task": "text-classification",
-  "library": "transformers",
   "model": "FacebookAI/roberta-base",
-  "processor": "FacebookAI/roberta-base",
+  "library": "transformers",
   "device": "cpu",
   "device_ids": null,
   "seed": 42,
   "inter_op_num_threads": null,
   "intra_op_num_threads": null,
-  "model_kwargs": {},
-  "processor_kwargs": {},
-  "hub_kwargs": {},
+  "hub_kwargs": {
+  "revision": "main",
+  "force_download": false,
+  "local_files_only": false,
+  "trust_remote_code": false
+  },
   "no_weights": true,
   "device_map": null,
   "torch_dtype": null,
@@ -74,17 +76,17 @@
   "environment": {
   "cpu": " AMD EPYC 7763 64-Core Processor",
   "cpu_count": 4,
-  "cpu_ram_mb": 16757.342208,
+  "cpu_ram_mb": 16757.346304,
   "system": "Linux",
   "machine": "x86_64",
   "platform": "Linux-6.5.0-1018-azure-x86_64-with-glibc2.35",
   "processor": "x86_64",
   "python_version": "3.10.14",
   "optimum_benchmark_version": "0.2.0",
-  "optimum_benchmark_commit": "a8ccb91469272f9f38883fbf3e14bffcd5e95f2d",
+  "optimum_benchmark_commit": "217063f5c507ed7cc255df7e1f64c4333a0b4dfe",
   "transformers_version": "4.40.2",
   "transformers_commit": null,
-  "accelerate_version": "0.30.0",
+  "accelerate_version": "0.30.1",
   "accelerate_commit": null,
   "diffusers_version": "0.27.2",
   "diffusers_commit": null,
@@ -100,7 +102,7 @@
   "overall": {
   "memory": {
   "unit": "MB",
-  "max_ram": 2851.672064,
+  "max_ram": 2834.825216,
   "max_global_vram": null,
   "max_process_vram": null,
   "max_reserved": null,
@@ -109,24 +111,24 @@
   "latency": {
   "unit": "s",
   "count": 5,
-  "total": 3.000934516999905,
-  "mean": 0.600186903399981,
-  "stdev": 0.05017703639796197,
-  "p50": 0.5769044820000317,
-  "p90": 0.6520690779999541,
-  "p95": 0.6762200539999412,
-  "p99": 0.695540834799931,
+  "total": 3.05117056499995,
+  "mean": 0.61023411299999,
+  "stdev": 0.049766250845493576,
+  "p50": 0.5849996929999861,
+  "p90": 0.6628558386000009,
+  "p95": 0.6861116968000146,
+  "p99": 0.7047163833600257,
   "values": [
-  0.7003710299999284,
-  0.5769044820000317,
-  0.572004416000027,
-  0.5796161499999926,
-  0.5720384389999253
+  0.7093675550000285,
+  0.5849996929999861,
+  0.5845724739999696,
+  0.5791425790000062,
+  0.5930882639999595
   ]
   },
   "throughput": {
   "unit": "samples/s",
-  "value": 16.661476522315457
+  "value": 16.387153367809454
   },
   "energy": null,
   "efficiency": null
@@ -134,7 +136,7 @@
   "warmup": {
   "memory": {
   "unit": "MB",
-  "max_ram": 2851.672064,
+  "max_ram": 2834.825216,
   "max_global_vram": null,
   "max_process_vram": null,
   "max_reserved": null,
@@ -143,21 +145,21 @@
   "latency": {
   "unit": "s",
   "count": 2,
-  "total": 1.2772755119999601,
-  "mean": 0.6386377559999801,
-  "stdev": 0.061733273999948324,
-  "p50": 0.6386377559999801,
-  "p90": 0.6880243751999388,
-  "p95": 0.6941977025999335,
-  "p99": 0.6991363645199294,
+  "total": 1.2943672480000146,
+  "mean": 0.6471836240000073,
+  "stdev": 0.062183931000021175,
+  "p50": 0.6471836240000073,
+  "p90": 0.6969307688000242,
+  "p95": 0.7031491619000263,
+  "p99": 0.708123876380028,
   "values": [
-  0.7003710299999284,
-  0.5769044820000317
+  0.7093675550000285,
+  0.5849996929999861
   ]
   },
   "throughput": {
   "unit": "samples/s",
-  "value": 6.263331540329609
+  "value": 6.1806261031103515
   },
   "energy": null,
   "efficiency": null
@@ -165,7 +167,7 @@
   "train": {
   "memory": {
   "unit": "MB",
-  "max_ram": 2851.672064,
+  "max_ram": 2834.825216,
   "max_global_vram": null,
   "max_process_vram": null,
   "max_reserved": null,
@@ -174,22 +176,22 @@
   "latency": {
   "unit": "s",
   "count": 3,
-  "total": 1.723659004999945,
-  "mean": 0.5745530016666484,
-  "stdev": 0.0035802134643263413,
-  "p50": 0.5720384389999253,
-  "p90": 0.5781006077999791,
-  "p95": 0.5788583788999858,
-  "p99": 0.5794645957799912,
+  "total": 1.7568033169999353,
+  "mean": 0.5856011056666451,
+  "stdev": 0.005739575760091585,
+  "p50": 0.5845724739999696,
+  "p90": 0.5913851059999615,
+  "p95": 0.5922366849999605,
+  "p99": 0.5929179481999597,
   "values": [
-  0.572004416000027,
-  0.5796161499999926,
-  0.5720384389999253
+  0.5845724739999696,
+  0.5791425790000062,
+  0.5930882639999595
   ]
   },
   "throughput": {
   "unit": "samples/s",
-  "value": 10.442900798699783
+  "value": 10.245882294176395
   },
   "energy": null,
   "efficiency": null