Commit d9930b1
Author: IlyasMoutawwakil
Parent: ec3b911

Upload cpu_inference_transformers_multiple-choice_FacebookAI/roberta-base/benchmark.json with huggingface_hub

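As the commit message notes, the file was pushed with the huggingface_hub client. A minimal sketch of such an upload, assuming a dataset repo and using a placeholder repo_id (the target repository is not shown on this page):

```python
# Hypothetical sketch of pushing a benchmark.json like this one with huggingface_hub.
# The repo_id below is a placeholder, not the actual repository behind this commit.
from huggingface_hub import HfApi

api = HfApi()  # authenticates via the cached login / HF_TOKEN
api.upload_file(
    path_or_fileobj="benchmark.json",
    path_in_repo=(
        "cpu_inference_transformers_multiple-choice_FacebookAI/"
        "roberta-base/benchmark.json"
    ),
    repo_id="<user>/<benchmark-results>",  # placeholder
    repo_type="dataset",                   # assumption: results live in a dataset repo
    commit_message=(
        "Upload cpu_inference_transformers_multiple-choice_FacebookAI/"
        "roberta-base/benchmark.json with huggingface_hub"
    ),
)
```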
cpu_inference_transformers_multiple-choice_FacebookAI/roberta-base/benchmark.json CHANGED
@@ -80,7 +80,7 @@
     "processor": "x86_64",
     "python_version": "3.10.14",
     "optimum_benchmark_version": "0.2.0",
-    "optimum_benchmark_commit": "b3690661eecec40c5905418f03e56991fd2bca89",
+    "optimum_benchmark_commit": "20967d442ecbde73b309c993163c266eac5ccef4",
     "transformers_version": "4.40.2",
     "transformers_commit": null,
     "accelerate_version": "0.30.1",
@@ -99,7 +99,7 @@
   "forward": {
     "memory": {
       "unit": "MB",
-      "max_ram": 942.678016,
+      "max_ram": 941.830144,
       "max_global_vram": null,
       "max_process_vram": null,
       "max_reserved": null,
@@ -107,50 +107,57 @@
     },
     "latency": {
       "unit": "s",
-      "count": 19,
-      "total": 1.0007379490000972,
-      "mean": 0.05267041836842617,
-      "stdev": 0.00556302420093012,
-      "p50": 0.05315935100000502,
-      "p90": 0.059177086400018196,
-      "p95": 0.06141372179999962,
-      "p99": 0.062308089959988135,
+      "count": 26,
+      "total": 1.0241017170000646,
+      "mean": 0.03938852757692556,
+      "stdev": 0.0009090209638033199,
+      "p50": 0.03920731699997759,
+      "p90": 0.03963646399998311,
+      "p95": 0.03984272700000702,
+      "p99": 0.04276906250001389,
       "values": [
-        0.042917966000004526,
-        0.042082105000019965,
-        0.041132832000016606,
-        0.050883822000002965,
-        0.061289504000001216,
-        0.056013610999997354,
-        0.06253168199998527,
-        0.05864898200002244,
-        0.05272977900000342,
-        0.05429052300002013,
-        0.0546982840000112,
-        0.053818511999992324,
-        0.04984101499999838,
-        0.05463972499998704,
-        0.05156806000002234,
-        0.05304324400000837,
-        0.05266239300001985,
-        0.05478655899997875,
-        0.05315935100000502
+        0.039787550999960786,
+        0.039046206000023176,
+        0.03925625800002308,
+        0.03893408599998338,
+        0.038933315000008406,
+        0.039403514000014184,
+        0.03906951900000877,
+        0.039278419999959624,
+        0.03923857600000247,
+        0.04373837700001104,
+        0.03915436799996996,
+        0.03934350200000836,
+        0.039165828999955465,
+        0.03933524500001795,
+        0.03933453500002315,
+        0.03917605799995272,
+        0.03948537700000543,
+        0.039155380000011064,
+        0.03986111900002243,
+        0.039419935000012174,
+        0.039041365999992195,
+        0.039159277000010206,
+        0.03901637000001301,
+        0.039366785000026994,
+        0.038784917000043606,
+        0.03861583200000496
       ]
     },
     "throughput": {
       "unit": "samples/s",
-      "value": 18.985989308174176
+      "value": 25.3881031233525
    },
     "energy": {
       "unit": "kWh",
-      "cpu": 1.8536311608773693e-06,
-      "ram": 7.746148890405602e-08,
+      "cpu": 1.5062757027454869e-06,
+      "ram": 6.295007615710947e-08,
       "gpu": 0.0,
-      "total": 1.9310926497814255e-06
+      "total": 1.5692257789025965e-06
     },
     "efficiency": {
       "unit": "samples/kWh",
-      "value": 517841.5443263103
+      "value": 637256.9285086101
     }
   }
 }
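The new summary statistics are internally consistent with the raw "values" list: mean = total / count ≈ 0.0394 s per forward pass, and throughput = count / total = 26 / 1.0241 s ≈ 25.39 samples/s, matching the reported fields. A small sketch that re-derives them from the uploaded file; since the parent key of the "forward" section is not visible in this diff, it is located by a recursive key search rather than a fixed path:

```python
# Sketch: re-derive the forward-pass summary stats from benchmark.json.
import json


def find_key(obj, key):
    """Depth-first search for the first value stored under `key`."""
    if isinstance(obj, dict):
        if key in obj:
            return obj[key]
        for value in obj.values():
            found = find_key(value, key)
            if found is not None:
                return found
    elif isinstance(obj, list):
        for item in obj:
            found = find_key(item, key)
            if found is not None:
                return found
    return None


with open("benchmark.json") as f:
    report = json.load(f)

latency = find_key(report, "forward")["latency"]
values = latency["values"]

count = len(values)         # 26 in the new measurement
total = sum(values)         # ~1.0241 s
mean = total / count        # ~0.0394 s per forward pass
throughput = count / total  # ~25.39 samples/s, matching the "throughput" field

print(f"count={count} total={total:.6f}s mean={mean:.6f}s throughput={throughput:.2f}/s")
```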