IlyasMoutawwakil committed
Commit 142f2d6
1 Parent(s): d2f673c

Upload cpu_inference_transformers_multiple-choice_FacebookAI/roberta-base/benchmark.json with huggingface_hub

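The commit message notes the file was uploaded with huggingface_hub. A minimal sketch of such an upload using the library's HfApi.upload_file follows; the repo id and local path are placeholders (not taken from this commit), and a dataset repo is assumed.

```python
from huggingface_hub import HfApi

api = HfApi()

# Hypothetical repo id and local file path; only path_in_repo mirrors the
# filename shown in this commit.
api.upload_file(
    path_or_fileobj="benchmark.json",
    path_in_repo="cpu_inference_transformers_multiple-choice_FacebookAI/roberta-base/benchmark.json",
    repo_id="<user>/<benchmark-dataset>",
    repo_type="dataset",
    commit_message=(
        "Upload cpu_inference_transformers_multiple-choice_FacebookAI/"
        "roberta-base/benchmark.json with huggingface_hub"
    ),
)
```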
cpu_inference_transformers_multiple-choice_FacebookAI/roberta-base/benchmark.json CHANGED
@@ -80,7 +80,7 @@
   "processor": "x86_64",
   "python_version": "3.10.15",
   "optimum_benchmark_version": "0.5.0.dev0",
-  "optimum_benchmark_commit": "31aa6620675bda1ecd6e40a22ecaa03106d279d8",
+  "optimum_benchmark_commit": "1c20082e96a83cbb10a6021fdbbcd050ed6631b4",
   "transformers_version": "4.46.3",
   "transformers_commit": null,
   "accelerate_version": "1.1.1",
@@ -101,7 +101,7 @@
   "load": {
     "memory": {
       "unit": "MB",
-      "max_ram": 965.132288,
+      "max_ram": 966.283264,
       "max_global_vram": null,
       "max_process_vram": null,
       "max_reserved": null,
@@ -110,15 +110,15 @@
     "latency": {
       "unit": "s",
       "values": [
-        1.2161343070000044
+        1.2190277400000014
       ],
       "count": 1,
-      "total": 1.2161343070000044,
-      "mean": 1.2161343070000044,
-      "p50": 1.2161343070000044,
-      "p90": 1.2161343070000044,
-      "p95": 1.2161343070000044,
-      "p99": 1.2161343070000044,
+      "total": 1.2190277400000014,
+      "mean": 1.2190277400000014,
+      "p50": 1.2190277400000014,
+      "p90": 1.2190277400000014,
+      "p95": 1.2190277400000014,
+      "p99": 1.2190277400000014,
       "stdev": 0,
       "stdev_": 0
     },
@@ -129,7 +129,7 @@
   "forward": {
     "memory": {
       "unit": "MB",
-      "max_ram": 981.598208,
+      "max_ram": 982.781952,
       "max_global_vram": null,
       "max_process_vram": null,
       "max_reserved": null,
@@ -138,39 +138,38 @@
     "latency": {
      "unit": "s",
      "values": [
-        0.18017741800002796,
-        0.1795368389999794,
-        0.17942196400002786,
-        0.1322000579999667,
-        0.09448050100002092,
-        0.0940044499999999,
-        0.09258649800000285,
-        0.09246434899995393
+        0.18024533800002018,
+        0.18033672899997555,
+        0.17786964300000818,
+        0.16922332500001858,
+        0.09826937500002941,
+        0.09685704700001452,
+        0.10003447999997661
       ],
-      "count": 8,
-      "total": 1.0448720769999795,
-      "mean": 0.13060900962499744,
-      "p50": 0.11334027949999381,
-      "p90": 0.17972901269999397,
-      "p95": 0.17995321535001096,
-      "p99": 0.18013257747002456,
-      "stdev": 0.039971997997362216,
-      "stdev_": 30.604319037506826
+      "count": 7,
+      "total": 1.002835937000043,
+      "mean": 0.14326227671429187,
+      "p50": 0.16922332500001858,
+      "p90": 0.18028189440000233,
+      "p95": 0.18030931169998893,
+      "p99": 0.18033124553997823,
+      "stdev": 0.03902445501029402,
+      "stdev_": 27.23986795778804
     },
     "throughput": {
       "unit": "samples/s",
-      "value": 15.312879300917823
+      "value": 13.960409159129883
     },
     "energy": {
       "unit": "kWh",
-      "cpu": 3.7452534318180895e-06,
-      "ram": 1.5652067614482134e-07,
+      "cpu": 3.8407534666667616e-06,
+      "ram": 1.6050451571512105e-07,
       "gpu": 0.0,
-      "total": 3.901774107962911e-06
+      "total": 4.001257982381883e-06
     },
     "efficiency": {
       "unit": "samples/kWh",
-      "value": 512587.3371086021
+      "value": 499842.80164045637
     }
   }
 }
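The derived statistics in the new report follow mechanically from the raw latency samples. The sketch below (not part of the benchmark itself) recomputes them with numpy, assuming linear percentile interpolation and a population standard deviation; the factor of 2 used for throughput and efficiency is assumed to be the configured batch size, which is consistent with the reported figures.

```python
import numpy as np

# Raw forward-pass latencies (s) from the new benchmark.json above.
values = np.array([
    0.18024533800002018,
    0.18033672899997555,
    0.17786964300000818,
    0.16922332500001858,
    0.09826937500002941,
    0.09685704700001452,
    0.10003447999997661,
])

total = values.sum()                    # ~1.0028359
mean = values.mean()                    # ~0.1432623
p50, p90, p95, p99 = np.percentile(values, [50, 90, 95, 99])
stdev = values.std()                    # population std, ~0.0390245
stdev_rel = 100 * stdev / mean          # ~27.24 (% of the mean)

# Assumed batch size of 2, consistent with the reported throughput.
batch_size = 2
throughput = batch_size / mean          # ~13.96 samples/s

# Energy figures (kWh) from the report; the reported efficiency matches
# batch_size / total energy.
energy_total = 3.8407534666667616e-06 + 1.6050451571512105e-07 + 0.0
efficiency = batch_size / energy_total  # ~499842.8 samples/kWh
```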