IlyasMoutawwakil HF staff committed on
Commit
b79d7e3
1 Parent(s): ff42133

Upload cpu_inference_transformers_text-classification_FacebookAI/roberta-base/benchmark.json with huggingface_hub

Browse files
cpu_inference_transformers_text-classification_FacebookAI/roberta-base/benchmark.json CHANGED
@@ -78,7 +78,7 @@
78
  "processor": "x86_64",
79
  "python_version": "3.10.14",
80
  "optimum_benchmark_version": "0.2.1",
81
- "optimum_benchmark_commit": "2516ce57a5b64eefeb78dc75a171e0cdff88823e",
82
  "transformers_version": "4.40.2",
83
  "transformers_commit": null,
84
  "accelerate_version": "0.30.1",
@@ -97,7 +97,7 @@
97
  "forward": {
98
  "memory": {
99
  "unit": "MB",
100
- "max_ram": 936.808448,
101
  "max_global_vram": null,
102
  "max_process_vram": null,
103
  "max_reserved": null,
@@ -105,60 +105,59 @@
105
  },
106
  "latency": {
107
  "unit": "s",
108
- "count": 29,
109
- "total": 1.0147147829998744,
110
- "mean": 0.03499016493103015,
111
- "stdev": 0.0007279547982049233,
112
- "p50": 0.03503617699999495,
113
- "p90": 0.03573674660002553,
114
- "p95": 0.03618377639999153,
115
- "p99": 0.03629199856001151,
116
  "values": [
117
- 0.03631409000001895,
118
- 0.035046595999972396,
119
- 0.03451718899998468,
120
- 0.036106652999990274,
121
- 0.03358430999998063,
122
- 0.03516179100000727,
123
- 0.034629198000004635,
124
- 0.03514327699997466,
125
- 0.03498393999996097,
126
- 0.03543017199996257,
127
- 0.035018965999995544,
128
- 0.03289524500002017,
129
- 0.03562841100000469,
130
- 0.034976516999961405,
131
- 0.03501180199998544,
132
- 0.03463143200002605,
133
- 0.03503617699999495,
134
- 0.03374574099996153,
135
- 0.03519043500000407,
136
- 0.034186072000011336,
137
- 0.03514610100000937,
138
- 0.03539599700002327,
139
- 0.03501294300002655,
140
- 0.035431072999983826,
141
- 0.03514369700002362,
142
- 0.03450180999999475,
143
- 0.03564427000003434,
144
- 0.03623519199999237,
145
- 0.03496568599996408
146
  ]
147
  },
148
  "throughput": {
149
  "unit": "samples/s",
150
- "value": 28.579459455853407
151
  },
152
  "energy": {
153
  "unit": "kWh",
154
- "cpu": 1.1748274946524428e-06,
155
- "ram": 4.909816506872075e-08,
156
  "gpu": 0.0,
157
- "total": 1.2239256597211635e-06
158
  },
159
  "efficiency": {
160
  "unit": "samples/kWh",
161
- "value": 817043.0875906479
162
  }
163
  }
164
  }
 
78
  "processor": "x86_64",
79
  "python_version": "3.10.14",
80
  "optimum_benchmark_version": "0.2.1",
81
+ "optimum_benchmark_commit": "0b24af9d7b7751f74b160dfade73ef78e10964d6",
82
  "transformers_version": "4.40.2",
83
  "transformers_commit": null,
84
  "accelerate_version": "0.30.1",
 
97
  "forward": {
98
  "memory": {
99
  "unit": "MB",
100
+ "max_ram": 936.378368,
101
  "max_global_vram": null,
102
  "max_process_vram": null,
103
  "max_reserved": null,
 
105
  },
106
  "latency": {
107
  "unit": "s",
108
+ "count": 28,
109
+ "total": 1.0318560470000762,
110
+ "mean": 0.03685200167857415,
111
+ "stdev": 0.0007365907624244915,
112
+ "p50": 0.03712659000001395,
113
+ "p90": 0.037604654899990916,
114
+ "p95": 0.03778203910001991,
115
+ "p99": 0.037902372900020395,
116
  "values": [
117
+ 0.03730902400002378,
118
+ 0.03711104100000284,
119
+ 0.03782898600002227,
120
+ 0.037296219999973346,
121
+ 0.03519177999999101,
122
+ 0.03684077099995875,
123
+ 0.03648704300002237,
124
+ 0.03665597199994863,
125
+ 0.036056962000031945,
126
+ 0.037018546000012975,
127
+ 0.035860350999996626,
128
+ 0.0355283939999822,
129
+ 0.03752841900001158,
130
+ 0.037215889000037805,
131
+ 0.03719487900002605,
132
+ 0.036828968999998324,
133
+ 0.037565998999980366,
134
+ 0.035797451999997065,
135
+ 0.03744461100001217,
136
+ 0.03578372599997692,
137
+ 0.03714795100000856,
138
+ 0.03743923000001814,
139
+ 0.03683505000003606,
140
+ 0.037142139000025054,
141
+ 0.03583278799999334,
142
+ 0.037694852000015544,
143
+ 0.0379295160000197,
144
+ 0.037289486999952715
 
145
  ]
146
  },
147
  "throughput": {
148
  "unit": "samples/s",
149
+ "value": 27.135568068244243
150
  },
151
  "energy": {
152
  "unit": "kWh",
153
+ "cpu": 1.2370425379938551e-06,
154
+ "ram": 5.169770543869134e-08,
155
  "gpu": 0.0,
156
+ "total": 1.2887402434325465e-06
157
  },
158
  "efficiency": {
159
  "unit": "samples/kWh",
160
+ "value": 775951.5581949317
161
  }
162
  }
163
  }