IlyasMoutawwakil committed
Commit ca02616
1 Parent(s): c500246

Upload cpu_inference_transformers_multiple-choice_FacebookAI/roberta-base/benchmark.json with huggingface_hub

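The commit message indicates the file was pushed with `huggingface_hub`. A minimal sketch of such an upload, in Python, is shown below; the repo id and local path are placeholders rather than values taken from this commit, and only the destination path matches the file name above.

from huggingface_hub import HfApi

# Hypothetical repo id and local file path; substitute the actual target repo
# and the locally generated benchmark report. Only the in-repo path below is
# taken from this commit.
api = HfApi()
api.upload_file(
    path_or_fileobj="benchmark.json",
    path_in_repo="cpu_inference_transformers_multiple-choice_FacebookAI/roberta-base/benchmark.json",
    repo_id="<namespace>/<repo-name>",  # assumption: the benchmark results repo
    repo_type="dataset",                # assumption: results are stored in a dataset repo
    commit_message="Upload cpu_inference_transformers_multiple-choice_FacebookAI/roberta-base/benchmark.json with huggingface_hub",
)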
cpu_inference_transformers_multiple-choice_FacebookAI/roberta-base/benchmark.json CHANGED
@@ -80,7 +80,7 @@
   "processor": "x86_64",
   "python_version": "3.10.14",
   "optimum_benchmark_version": "0.2.1",
- "optimum_benchmark_commit": "3b8c49a169ebd79001b2a83fbf2b332612417102",
+ "optimum_benchmark_commit": "3731aa19b0b76022fb42f78436721c579f50c777",
   "transformers_version": "4.42.3",
   "transformers_commit": null,
   "accelerate_version": "0.31.0",
@@ -99,7 +99,7 @@
   "forward": {
   "memory": {
   "unit": "MB",
- "max_ram": 941.084672,
+ "max_ram": 940.60544,
   "max_global_vram": null,
   "max_process_vram": null,
   "max_reserved": null,
@@ -107,53 +107,54 @@
   },
   "latency": {
   "unit": "s",
- "count": 22,
- "total": 1.0251077170000258,
- "mean": 0.04659580531818299,
- "stdev": 0.0023014526536519666,
- "p50": 0.04696745949999581,
- "p90": 0.04885256429998321,
- "p95": 0.049290728549988214,
- "p99": 0.04973596296999972,
+ "count": 23,
+ "total": 1.0075318320000406,
+ "mean": 0.04380573182608872,
+ "stdev": 0.0028614411417156285,
+ "p50": 0.044998319000001175,
+ "p90": 0.04590145940003367,
+ "p95": 0.04605551700000774,
+ "p99": 0.04655986413998221,
   "values": [
- 0.04892681299998003,
- 0.04707428400001845,
- 0.04728387599999451,
- 0.04797170000000506,
- 0.04700219900001912,
- 0.046932719999972505,
- 0.04630144099996869,
- 0.046472570999981144,
- 0.04818432600001188,
- 0.04682899599998791,
- 0.04655274999998937,
- 0.04984922500000266,
- 0.04675453700002663,
- 0.047847076999971705,
- 0.047436500000003434,
- 0.04624118899999985,
- 0.04710199599998077,
- 0.046273438000014266,
- 0.04930988199998865,
- 0.044087377000039396,
- 0.04004066100003456,
- 0.0406341590000352
+ 0.04669832399997631,
+ 0.04506544999998141,
+ 0.044602386999997634,
+ 0.04576921299997139,
+ 0.04541668500002061,
+ 0.04532359099999894,
+ 0.044050789000039,
+ 0.045284729000002244,
+ 0.04606896100000313,
+ 0.04490662500001008,
+ 0.045599987000002784,
+ 0.045934521000049244,
+ 0.045100214000001415,
+ 0.045747851999976774,
+ 0.04473391199996968,
+ 0.044998319000001175,
+ 0.044944550000025174,
+ 0.0446361460000162,
+ 0.04026164200001858,
+ 0.038125396999987515,
+ 0.03803132100000539,
+ 0.03808979999996609,
+ 0.03814141700001983
   ]
   },
   "throughput": {
   "unit": "samples/s",
- "value": 21.46115928615085
+ "value": 22.828062865609862
   },
   "energy": {
   "unit": "kWh",
- "cpu": 1.5527764446715007e-06,
- "ram": 6.489326515634274e-08,
+ "cpu": 1.4831602818681383e-06,
+ "ram": 6.198416202323642e-08,
   "gpu": 0.0,
- "total": 1.6176697098278435e-06
+ "total": 1.5451444438913747e-06
   },
   "efficiency": {
   "unit": "samples/kWh",
- "value": 618173.1622498034
+ "value": 647188.6844970599
   }
   }
   }
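The summary statistics in the new report are consistent with being derived from the raw measurements: mean = total / count, throughput = count / total latency, and efficiency = 1 / total energy. A small sketch that recomputes them, assuming the benchmark.json layout shown in the diff (in particular that "forward" is reachable at the top level of the file):

import json

with open("benchmark.json") as f:  # assumption: the uploaded report, downloaded locally
    report = json.load(f)

forward = report["forward"]        # assumption: "forward" sits at the top level
latency = forward["latency"]

count, total = latency["count"], latency["total"]   # 23 calls, ~1.0075 s
print(total / count)                                # ~0.043806 s, matches "mean"
print(count / total)                                # ~22.83 samples/s, matches throughput "value"
print(1.0 / forward["energy"]["total"])             # ~647189 samples/kWh, matches efficiency "value"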