IlyasMoutawwakil HF staff committed on
Commit
c8cce44
1 Parent(s): dee4985

Upload cpu_inference_transformers_text-classification_FacebookAI/roberta-base/benchmark.json with huggingface_hub

Browse files
cpu_inference_transformers_text-classification_FacebookAI/roberta-base/benchmark.json CHANGED
@@ -78,7 +78,7 @@
78
  "processor": "x86_64",
79
  "python_version": "3.10.14",
80
  "optimum_benchmark_version": "0.2.0",
81
- "optimum_benchmark_commit": "420ddf2d65dc94a93273eacd14ac0bd4f4b6cef7",
82
  "transformers_version": "4.40.2",
83
  "transformers_commit": null,
84
  "accelerate_version": "0.30.1",
@@ -97,7 +97,7 @@
97
  "forward": {
98
  "memory": {
99
  "unit": "MB",
100
- "max_ram": 937.009152,
101
  "max_global_vram": null,
102
  "max_process_vram": null,
103
  "max_reserved": null,
@@ -105,60 +105,59 @@
105
  },
106
  "latency": {
107
  "unit": "s",
108
- "count": 29,
109
- "total": 1.0096047679998605,
110
- "mean": 0.03481395751723657,
111
- "stdev": 0.0006038408803691896,
112
- "p50": 0.0349625489999994,
113
- "p90": 0.035284980199958224,
114
- "p95": 0.03557195819998924,
115
- "p99": 0.035902999560003084,
116
  "values": [
117
- 0.03514089199995851,
118
- 0.03522858699994913,
119
- 0.034717309999962254,
120
- 0.035612894999985656,
121
- 0.033359862999986944,
122
- 0.03482869900000196,
123
- 0.035013885000012124,
124
- 0.034822657999995954,
125
- 0.03513603300001478,
126
- 0.03497156600002427,
127
- 0.03494731099999626,
128
- 0.033305181000002904,
129
- 0.0349625489999994,
130
- 0.035120154000026105,
131
- 0.034556238999982725,
132
- 0.034484486000053494,
133
- 0.035091369999975086,
134
- 0.03382867999999917,
135
- 0.03521480999995674,
136
- 0.033672808000005716,
137
- 0.034871439000028204,
138
- 0.03492828499997813,
139
- 0.03476965899994866,
140
- 0.03505422999995744,
141
- 0.03439435699999649,
142
- 0.03503194800003939,
143
- 0.035510552999994616,
144
- 0.03601581800000986,
145
- 0.03501250300001857
146
  ]
147
  },
148
  "throughput": {
149
  "unit": "samples/s",
150
- "value": 28.72411157234551
151
  },
152
  "energy": {
153
  "unit": "kWh",
154
- "cpu": 1.1410578091939292e-06,
155
- "ram": 4.76849839694101e-08,
156
  "gpu": 0.0,
157
- "total": 1.1887427931633394e-06
158
  },
159
  "efficiency": {
160
  "unit": "samples/kWh",
161
- "value": 841224.8686184841
162
  }
163
  }
164
  }
 
78
  "processor": "x86_64",
79
  "python_version": "3.10.14",
80
  "optimum_benchmark_version": "0.2.0",
81
+ "optimum_benchmark_commit": "553a573935b71390502b7c324bca2f06f9fbc412",
82
  "transformers_version": "4.40.2",
83
  "transformers_commit": null,
84
  "accelerate_version": "0.30.1",
 
97
  "forward": {
98
  "memory": {
99
  "unit": "MB",
100
+ "max_ram": 936.370176,
101
  "max_global_vram": null,
102
  "max_process_vram": null,
103
  "max_reserved": null,
 
105
  },
106
  "latency": {
107
  "unit": "s",
108
+ "count": 28,
109
+ "total": 1.0006047379999359,
110
+ "mean": 0.03573588349999771,
111
+ "stdev": 0.0016637553398426057,
112
+ "p50": 0.03622782299999017,
113
+ "p90": 0.03738745959998937,
114
+ "p95": 0.03756524419999607,
115
+ "p99": 0.03760860288998003,
116
  "values": [
117
+ 0.036516429000016615,
118
+ 0.03575793199996724,
119
+ 0.03554674800000157,
120
+ 0.036546904999966046,
121
+ 0.034678901999996015,
122
+ 0.03574436299999206,
123
+ 0.03576768599998559,
124
+ 0.03761713299996927,
125
+ 0.03732741999999689,
126
+ 0.036384865000002264,
127
+ 0.03648150499998337,
128
+ 0.03758554000000913,
129
+ 0.03596258900000748,
130
+ 0.036338688999990154,
131
+ 0.03623097899998129,
132
+ 0.03584072300003527,
133
+ 0.0362976730000355,
134
+ 0.03590629500001796,
135
+ 0.036584296000000904,
136
+ 0.03752755199997182,
137
+ 0.036224666999999044,
138
+ 0.03637293399998498,
139
+ 0.035781601999985924,
140
+ 0.03641234600002008,
141
+ 0.03516361199996254,
142
+ 0.031634014000019306,
143
+ 0.031222558000024492,
144
+ 0.03114878100001306
 
145
  ]
146
  },
147
  "throughput": {
148
  "unit": "samples/s",
149
+ "value": 27.983077569638485
150
  },
151
  "energy": {
152
  "unit": "kWh",
153
+ "cpu": 1.217152136895392e-06,
154
+ "ram": 5.086647873736361e-08,
155
  "gpu": 0.0,
156
+ "total": 1.2680186156327555e-06
157
  },
158
  "efficiency": {
159
  "unit": "samples/kWh",
160
+ "value": 788631.9551397034
161
  }
162
  }
163
  }