IlyasMoutawwakil committed
Commit 2516e38 · verified · 1 Parent(s): b906b23

Upload cpu_inference_transformers_text-classification_FacebookAI/roberta-base/benchmark.json with huggingface_hub
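For context, a minimal sketch of how a result file like this can be pushed with `huggingface_hub`; the `repo_id` and local file path below are placeholders, not values taken from this commit:

```python
# Sketch of an upload with huggingface_hub.HfApi.upload_file;
# repo_id and the local path are assumptions, not from this commit.
from huggingface_hub import HfApi

api = HfApi()
api.upload_file(
    path_or_fileobj="benchmark.json",  # local benchmark result (assumed path)
    path_in_repo="cpu_inference_transformers_text-classification_FacebookAI/roberta-base/benchmark.json",
    repo_id="<user>/<benchmark-dataset>",  # hypothetical dataset repo
    repo_type="dataset",
    commit_message=(
        "Upload cpu_inference_transformers_text-classification_"
        "FacebookAI/roberta-base/benchmark.json with huggingface_hub"
    ),
)
```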

cpu_inference_transformers_text-classification_FacebookAI/roberta-base/benchmark.json CHANGED
@@ -3,7 +3,7 @@
  "name": "cpu_inference_transformers_text-classification_FacebookAI/roberta-base",
  "backend": {
  "name": "pytorch",
- "version": "2.3.1+cpu",
+ "version": "2.4.0+cpu",
  "_target_": "optimum_benchmark.backends.pytorch.backend.PyTorchBackend",
  "task": "text-classification",
  "library": "transformers",
@@ -76,14 +76,14 @@
  "cpu_ram_mb": 16757.342208,
  "system": "Linux",
  "machine": "x86_64",
- "platform": "Linux-6.5.0-1023-azure-x86_64-with-glibc2.35",
+ "platform": "Linux-6.5.0-1024-azure-x86_64-with-glibc2.35",
  "processor": "x86_64",
  "python_version": "3.10.14",
  "optimum_benchmark_version": "0.3.1",
- "optimum_benchmark_commit": "328b924ef0be0164f0dc86652abfd3746f634c6b",
+ "optimum_benchmark_commit": "b0801269b3611e452bb077a62163b08a99ceb2a9",
  "transformers_version": "4.42.4",
  "transformers_commit": null,
- "accelerate_version": "0.32.1",
+ "accelerate_version": "0.33.0",
  "accelerate_commit": null,
  "diffusers_version": "0.29.2",
  "diffusers_commit": null,
@@ -99,7 +99,7 @@
  "load": {
  "memory": {
  "unit": "MB",
- "max_ram": 939.843584,
+ "max_ram": 946.757632,
  "max_global_vram": null,
  "max_process_vram": null,
  "max_reserved": null,
@@ -108,31 +108,31 @@
  "latency": {
  "unit": "s",
  "count": 1,
- "total": 4.306336681000005,
- "mean": 4.306336681000005,
+ "total": 4.323723781000012,
+ "mean": 4.323723781000012,
  "stdev": 0.0,
- "p50": 4.306336681000005,
- "p90": 4.306336681000005,
- "p95": 4.306336681000005,
- "p99": 4.306336681000005,
+ "p50": 4.323723781000012,
+ "p90": 4.323723781000012,
+ "p95": 4.323723781000012,
+ "p99": 4.323723781000012,
  "values": [
- 4.306336681000005
+ 4.323723781000012
  ]
  },
  "throughput": null,
  "energy": {
  "unit": "kWh",
- "cpu": 4.917403194639418e-05,
- "ram": 2.055247578489343e-06,
+ "cpu": 4.856773747338189e-05,
+ "ram": 2.029772746200251e-06,
  "gpu": 0,
- "total": 5.1229279524883526e-05
+ "total": 5.059751021958214e-05
  },
  "efficiency": null
  },
  "forward": {
  "memory": {
  "unit": "MB",
- "max_ram": 948.494336,
+ "max_ram": 954.228736,
  "max_global_vram": null,
  "max_process_vram": null,
  "max_reserved": null,
@@ -140,64 +140,59 @@
  },
  "latency": {
  "unit": "s",
- "count": 33,
- "total": 1.018148321999945,
- "mean": 0.030852979454543787,
- "stdev": 0.0009472797954657501,
- "p50": 0.030748930999948243,
- "p90": 0.031449613600022984,
- "p95": 0.032437692999974385,
- "p99": 0.033826236720003636,
+ "count": 28,
+ "total": 1.0016023750000613,
+ "mean": 0.03577151339285933,
+ "stdev": 0.0012009202301223243,
+ "p50": 0.035923455500011414,
+ "p90": 0.03684090080001283,
+ "p95": 0.03698486219998358,
+ "p99": 0.03821847168996783,
  "values": [
- 0.03115915800003677,
- 0.030978247000007286,
- 0.030727591000015764,
- 0.030406340000013188,
- 0.030853856999954132,
- 0.031473917000027996,
- 0.03127944299995988,
- 0.03112019500002816,
- 0.030887971000026937,
- 0.03055784399998629,
- 0.030230781000000206,
- 0.030277428000033524,
- 0.031033193000041592,
- 0.030748930999948243,
- 0.030351927999959116,
- 0.030652711000016097,
- 0.030684900999972342,
- 0.030443488999992496,
- 0.03436501400000225,
- 0.03122740500003829,
- 0.03126610800001117,
- 0.03114128400000027,
- 0.03130137399995192,
- 0.031352400000002945,
- 0.029802029999984825,
- 0.030134433000000627,
- 0.029992454999955953,
- 0.030477251999968757,
- 0.032681335000006584,
- 0.032275264999952924,
- 0.029205494000052568,
- 0.029377824999983204,
- 0.029680723000012676
+ 0.03587814000002254,
+ 0.03569901499997741,
+ 0.03496999899999764,
+ 0.03404766099998824,
+ 0.03652564300000449,
+ 0.035557920999963244,
+ 0.03582958999999164,
+ 0.03596877100000029,
+ 0.0356541909999919,
+ 0.03506414600002472,
+ 0.034437160999971184,
+ 0.03685386900002641,
+ 0.03705539699996052,
+ 0.03549194700002545,
+ 0.03864864999997053,
+ 0.036835343000007015,
+ 0.03600408600004812,
+ 0.03598173499995028,
+ 0.03651958200003946,
+ 0.03597425100002738,
+ 0.03641235200001347,
+ 0.03621639500005358,
+ 0.036089666999998826,
+ 0.0357534770000143,
+ 0.036172531999966395,
+ 0.035748357999978,
+ 0.034796585000037794,
+ 0.03141591100001051
  ]
  },
  "throughput": {
  "unit": "samples/s",
- "value": 32.41178056963078
+ "value": 27.955205277941044
  },
  "energy": {
  "unit": "kWh",
- "cpu": 1.1605036024953805e-06,
- "ram": 4.849963094677471e-08,
+ "cpu": 1.221425293220414e-06,
+ "ram": 5.10452785314186e-08,
  "gpu": 0.0,
- "total": 1.2090032334421554e-06
+ "total": 1.2724705717518328e-06
  },
  "efficiency": {
  "unit": "samples/kWh",
- "value": 827127.647254423
+ "value": 785872.7912452092
  }
  }
  }
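A small sanity-check sketch for reading the updated file: the reported throughput matches count / total latency (28 / 1.0016 s ≈ 27.96 samples/s), and the efficiency appears to be the reciprocal of the per-forward energy total. The local filename and the top-level "report" key are assumptions about the file layout, not confirmed by this diff:

```python
# Recompute the forward-pass aggregates from the raw "values" list.
# Assumptions: a local copy named benchmark.json and a top-level "report"
# key wrapping the "forward" section; the formulas below simply reproduce
# the numbers reported in the diff, not optimum-benchmark internals.
import json
import statistics

with open("benchmark.json") as f:
    forward = json.load(f)["report"]["forward"]  # "report" key is assumed

latencies = forward["latency"]["values"]
count = len(latencies)                  # 28
total = sum(latencies)                  # ~1.0016 s
mean = statistics.mean(latencies)       # ~0.03577 s
throughput = count / total              # ~27.955 samples/s

energy_total = forward["energy"]["total"]  # kWh, seemingly per forward pass
efficiency = 1 / energy_total              # ~785_873 samples/kWh

print(throughput, forward["throughput"]["value"])
print(efficiency, forward["efficiency"]["value"])
```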