IlyasMoutawwakil committed · Commit f8f9349 · verified · 1 Parent(s): eef24a3

Upload cpu_inference_transformers_multiple-choice_FacebookAI/roberta-base/benchmark.json with huggingface_hub

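The commit message indicates the file was pushed programmatically with huggingface_hub. A minimal sketch of such an upload, assuming HfApi.upload_file against a dataset repo; the repo_id and local path below are illustrative placeholders, not taken from this page:

```python
from huggingface_hub import HfApi

api = HfApi()  # uses the token saved by `huggingface-cli login`
api.upload_file(
    path_or_fileobj="benchmark.json",  # local result file produced by the benchmark run
    path_in_repo="cpu_inference_transformers_multiple-choice_FacebookAI/roberta-base/benchmark.json",
    repo_id="<user-or-org>/<benchmark-results>",  # placeholder: the actual target repo is not shown on this page
    repo_type="dataset",  # assumption: results are stored in a dataset repo
    commit_message="Upload cpu_inference_transformers_multiple-choice_FacebookAI/roberta-base/benchmark.json with huggingface_hub",
)
```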
cpu_inference_transformers_multiple-choice_FacebookAI/roberta-base/benchmark.json CHANGED
@@ -78,7 +78,7 @@
  "processor": "x86_64",
  "python_version": "3.10.14",
  "optimum_benchmark_version": "0.2.1",
- "optimum_benchmark_commit": "c1d0b062e90b79e7705510c58cea731c0d90da8a",
+ "optimum_benchmark_commit": "48414f58841d7ba7c7fd42d74fd524d1d23c3081",
  "transformers_version": "4.40.2",
  "transformers_commit": null,
  "accelerate_version": "0.30.1",
@@ -97,7 +97,7 @@
  "forward": {
  "memory": {
  "unit": "MB",
- "max_ram": 939.307008,
+ "max_ram": 940.01152,
  "max_global_vram": null,
  "max_process_vram": null,
  "max_reserved": null,
@@ -105,56 +105,53 @@
  },
  "latency": {
  "unit": "s",
- "count": 25,
- "total": 1.007021844999997,
- "mean": 0.04028087379999988,
- "stdev": 0.0004709287670471039,
- "p50": 0.04019808400002489,
- "p90": 0.04071995439999796,
- "p95": 0.04116332200001693,
- "p99": 0.04166326476000336,
+ "count": 22,
+ "total": 1.0055013999999005,
+ "mean": 0.04570460909090457,
+ "stdev": 0.002044207790654603,
+ "p50": 0.045584504000004245,
+ "p90": 0.047683622399978275,
+ "p95": 0.04780703294997579,
+ "p99": 0.04800947760000838,
  "values": [
- 0.040897569999998495,
- 0.040453530999997156,
- 0.04013799100002302,
- 0.04012321499999416,
- 0.04029214999999908,
- 0.04019808400002489,
- 0.0403101230000118,
- 0.04019155099999239,
- 0.03995409899999913,
- 0.040100883000008025,
- 0.040419867999986536,
- 0.040447389999997085,
- 0.04028519700000288,
- 0.04015023400000928,
- 0.03998346399998809,
- 0.04005248299998243,
- 0.04005238199999894,
- 0.04032409899997447,
- 0.04026687300000731,
- 0.04026273099998434,
- 0.04015047499999014,
- 0.03972366299998953,
- 0.041800160999997615,
- 0.04122976000002154,
- 0.03921386800001869
+ 0.04762605199999825,
+ 0.04622490400004153,
+ 0.04733890599999313,
+ 0.045397739000009096,
+ 0.047688167999979214,
+ 0.04741648999998915,
+ 0.04764271199996983,
+ 0.047277360000009594,
+ 0.0453443190000371,
+ 0.04738351899999316,
+ 0.04781328899997561,
+ 0.04806162900001709,
+ 0.0453284809999559,
+ 0.04485566799996832,
+ 0.04400032099999862,
+ 0.04476101099999141,
+ 0.04577126899999939,
+ 0.044464947999983906,
+ 0.04494520600002261,
+ 0.04492509499999642,
+ 0.040697963000013715,
+ 0.04053635099995745
  ]
  },
  "throughput": {
  "unit": "samples/s",
- "value": 24.82567793750301
+ "value": 21.879631395841095
  },
  "energy": {
  "unit": "kWh",
- "cpu": 1.5316270355485445e-06,
- "ram": 6.40096037042781e-08,
+ "cpu": 1.5785319010416668e-06,
+ "ram": 6.596945329850996e-08,
  "gpu": 0.0,
- "total": 1.5956366392528227e-06
+ "total": 1.6445013543401768e-06
  },
  "efficiency": {
  "unit": "samples/kWh",
- "value": 626709.0986756627
+ "value": 608087.0638146905
  }
  }
  }
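For reference, the aggregate fields in the new report are numerically consistent with simple functions of the raw measurements: the mean is total/count, throughput is the inverse of the mean latency, the energy total is the sum of the cpu/ram/gpu components, and efficiency is the inverse of the total energy. A small sketch using the new values; these relations are inferred from the numbers on this page, not from optimum-benchmark's implementation:

```python
# Recompute the derived metrics of the new report from its raw fields.
# Assumption: the relations below are inferred from the reported numbers,
# not a statement of how optimum-benchmark computes them internally.
count = 22
total_latency_s = 1.0055013999999005            # sum of the 22 forward-pass latencies

mean_latency_s = total_latency_s / count        # ~0.04570460909090457   (reported "mean")
throughput = count / total_latency_s            # ~21.879631395841095    (reported "throughput", samples/s)

cpu_kwh = 1.5785319010416668e-06
ram_kwh = 6.596945329850996e-08
gpu_kwh = 0.0
total_kwh = cpu_kwh + ram_kwh + gpu_kwh         # ~1.6445013543401768e-06 (reported "total")
efficiency = 1.0 / total_kwh                    # ~608087.06 samples/kWh  (reported "efficiency")

print(mean_latency_s, throughput, total_kwh, efficiency)
```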