IlyasMoutawwakil committed
Commit 36d79c1
1 parent: a96c82f

Upload cpu_inference_transformers_text-classification_FacebookAI/roberta-base/benchmark.json with huggingface_hub

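The commit message above states the file was pushed with huggingface_hub. For reference, below is a minimal sketch of what such an upload typically looks like; upload_file and its arguments are the standard huggingface_hub API, but the repo_id and repo_type are placeholders I am assuming, since the target repository is not named in this commit view.

# Hedged sketch: one way to push this benchmark.json with huggingface_hub.
# repo_id and repo_type are assumptions/placeholders, not taken from this commit.
from huggingface_hub import HfApi

api = HfApi()
api.upload_file(
    path_or_fileobj="cpu_inference_transformers_text-classification_FacebookAI/roberta-base/benchmark.json",
    path_in_repo="cpu_inference_transformers_text-classification_FacebookAI/roberta-base/benchmark.json",
    repo_id="<namespace>/<benchmark-repo>",  # placeholder: target repo is not shown in this commit view
    repo_type="dataset",                     # assumption: benchmark dumps are commonly stored in dataset repos
    commit_message=(
        "Upload cpu_inference_transformers_text-classification_FacebookAI/"
        "roberta-base/benchmark.json with huggingface_hub"
    ),
)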
cpu_inference_transformers_text-classification_FacebookAI/roberta-base/benchmark.json CHANGED
@@ -78,7 +78,7 @@
  "processor": "x86_64",
  "python_version": "3.10.14",
  "optimum_benchmark_version": "0.2.0",
- "optimum_benchmark_commit": "2db7e171d23768cdeb341987283e17e0b2db9bd9",
+ "optimum_benchmark_commit": "b92e3e6e00670fa1351d9aef1be896254e5f33e0",
  "transformers_version": "4.40.2",
  "transformers_commit": null,
  "accelerate_version": "0.30.1",
@@ -97,7 +97,7 @@
  "forward": {
  "memory": {
  "unit": "MB",
- "max_ram": 936.472576,
+ "max_ram": 937.49248,
  "max_global_vram": null,
  "max_process_vram": null,
  "max_reserved": null,
@@ -105,60 +105,58 @@
  },
  "latency": {
  "unit": "s",
- "count": 29,
- "total": 1.0298660819999554,
- "mean": 0.03551262351723984,
- "stdev": 0.0007184867090388221,
- "p50": 0.035633279000023776,
- "p90": 0.03630524660003402,
- "p95": 0.036554439200006075,
- "p99": 0.036978298159999666,
+ "count": 27,
+ "total": 1.0255669960001228,
+ "mean": 0.03798396281481936,
+ "stdev": 0.0013657641120332562,
+ "p50": 0.03817636399998037,
+ "p90": 0.03896361340000567,
+ "p95": 0.03951649189999103,
+ "p99": 0.04031425351999132,
  "values": [
- 0.03578256700001248,
- 0.0350099760000262,
- 0.03513210500000241,
- 0.03594516099997236,
- 0.03369363100000555,
- 0.03574113999997053,
- 0.03572189400000525,
- 0.03608752600001708,
- 0.03501352399996449,
- 0.03653633300001502,
- 0.03517028599998184,
- 0.03464579799998546,
- 0.03583062699999573,
- 0.035376369999994495,
- 0.0358358580000413,
- 0.03551335599996719,
- 0.03538277199999129,
- 0.035430491999989044,
- 0.03498429799998348,
- 0.03427149899999904,
- 0.035711404999972274,
- 0.036038675000042986,
- 0.035633279000023776,
- 0.036247475000038776,
- 0.03467128400001229,
- 0.03656651000000011,
- 0.037138437999999496,
- 0.03607525399996803,
- 0.034678548999977465
+ 0.038789833999999246,
+ 0.037985176000006504,
+ 0.03855480300001091,
+ 0.03869269200004055,
+ 0.03647314000005508,
+ 0.03890146300000197,
+ 0.03817636399998037,
+ 0.03971348599998237,
+ 0.038459595000006175,
+ 0.03883157199999232,
+ 0.03855275899996968,
+ 0.03571738400000868,
+ 0.038099398999975165,
+ 0.03821538799996915,
+ 0.03765985500001534,
+ 0.03812939599998799,
+ 0.03905683900001122,
+ 0.03849970999999641,
+ 0.037342201000001296,
+ 0.038257356000030995,
+ 0.037666808000039964,
+ 0.03800090600003614,
+ 0.03811510900004578,
+ 0.03757702899997639,
+ 0.03666334799999049,
+ 0.04052533399999447,
+ 0.03291004999999814
  ]
  },
  "throughput": {
  "unit": "samples/s",
- "value": 28.159000968051355
+ "value": 26.326900246697065
  },
  "energy": {
  "unit": "kWh",
- "cpu": 1.1847706354828397e-06,
- "ram": 4.951349857482124e-08,
+ "cpu": 1.2473264502154457e-06,
+ "ram": 5.212426219828536e-08,
  "gpu": 0.0,
- "total": 1.2342841340576607e-06
+ "total": 1.299450712413731e-06
  },
  "efficiency": {
  "unit": "samples/kWh",
- "value": 810186.2224482617
+ "value": 769555.9288605098
  }
  }
  }
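For what it's worth, the summary fields in both versions of the hunk above appear to be derived from the raw measurements in the usual way: throughput is the latency count divided by the total latency (27 / 1.0255669960001228 ≈ 26.3269 samples/s), and efficiency is the reciprocal of the total energy (1 / 1.299450712413731e-06 kWh ≈ 769555.93 samples/kWh). Below is a small consistency-check sketch, assuming the key nesting shown in the hunks; the keys enclosing "forward" are not visible in this diff.

# Consistency-check sketch for the updated benchmark.json.
# The key layout is inferred from the diff hunks above; treat it as an assumption.
import json
import math

with open("cpu_inference_transformers_text-classification_FacebookAI/roberta-base/benchmark.json") as f:
    report = json.load(f)

forward = report["forward"]  # assumption: "forward" sits at this level of the JSON
latency = forward["latency"]
throughput = forward["throughput"]
energy = forward["energy"]
efficiency = forward["efficiency"]

# throughput [samples/s] matches count / total latency [s]
assert math.isclose(throughput["value"], latency["count"] / latency["total"], rel_tol=1e-6)
# efficiency [samples/kWh] matches 1 / total energy [kWh]
assert math.isclose(efficiency["value"], 1.0 / energy["total"], rel_tol=1e-6)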