IlyasMoutawwakil (HF staff) committed
Commit 9be2b89
1 Parent(s): 450233a

Upload cpu_inference_transformers_text-classification_FacebookAI/roberta-base/benchmark_report.json with huggingface_hub

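For reference, a commit like this one is typically produced with huggingface_hub's HfApi.upload_file. The sketch below is illustrative only: the repo_id and repo_type are assumptions, not taken from this page; only the file path and the default commit-message format come from the commit above.

# Sketch: uploading a benchmark report with huggingface_hub.
# repo_id/repo_type below are hypothetical placeholders for illustration.
from huggingface_hub import HfApi

api = HfApi()
api.upload_file(
    path_or_fileobj="benchmark_report.json",  # local report produced by the benchmark run
    path_in_repo="cpu_inference_transformers_text-classification_FacebookAI/roberta-base/benchmark_report.json",
    repo_id="user/benchmarks",  # hypothetical target repo
    repo_type="dataset",        # assumption; adjust to the actual repo type
)
# With no commit_message argument, huggingface_hub uses the default
# "Upload <path_in_repo> with huggingface_hub", which matches the message above.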
cpu_inference_transformers_text-classification_FacebookAI/roberta-base/benchmark_report.json CHANGED
@@ -2,7 +2,7 @@
     "forward": {
         "memory": {
             "unit": "MB",
-            "max_ram": 937.025536,
+            "max_ram": 936.8576,
             "max_global_vram": null,
             "max_process_vram": null,
             "max_reserved": null,
@@ -10,64 +10,60 @@
         },
         "latency": {
             "unit": "s",
-            "count": 33,
-            "total": 1.0114617630001135,
-            "mean": 0.03065035645454889,
-            "stdev": 0.0011875507030811637,
-            "p50": 0.030365853000034804,
-            "p90": 0.031478325200009746,
-            "p95": 0.03264780499998778,
-            "p99": 0.03502172852001422,
+            "count": 29,
+            "total": 1.0165581000000543,
+            "mean": 0.03505372758620877,
+            "stdev": 0.0009159489526071956,
+            "p50": 0.034928329000024405,
+            "p90": 0.03638322659998039,
+            "p95": 0.036566704400036086,
+            "p99": 0.03685525792003091,
             "values": [
-                0.03154161700001623,
-                0.03082063200002949,
-                0.030669419999981073,
-                0.030112801000029776,
-                0.029460423999978502,
-                0.029696754000042347,
-                0.030662255999970967,
-                0.030084738999960337,
-                0.03064913099996147,
-                0.030869894000034037,
-                0.030288358000007065,
-                0.03023467799999935,
-                0.030057548000002043,
-                0.030521123000028183,
-                0.030217246000006526,
-                0.030705667000006542,
-                0.030365853000034804,
-                0.030133068999987245,
-                0.031646572999989075,
-                0.03043232700002818,
-                0.03031435700000884,
-                0.03053655200000094,
-                0.031119548999981816,
-                0.035432117000027574,
-                0.030814829999997073,
-                0.029846775000009984,
-                0.029858066000031158,
-                0.029664915000012115,
-                0.029774008999993384,
-                0.03003562700001794,
-                0.03122515799998382,
-                0.03414965299998585,
-                0.02952004499996974
+                0.03553541299999097,
+                0.034703830000012204,
+                0.034620413999959965,
+                0.035291958000016166,
+                0.03420142200002374,
+                0.03441236700001582,
+                0.034928329000024405,
+                0.034950048999974115,
+                0.034483508999983314,
+                0.03560146500001338,
+                0.035092415999997684,
+                0.03252300100001548,
+                0.0351824020000322,
+                0.034734517999993386,
+                0.03420500900000434,
+                0.034816250000005766,
+                0.03559958099998539,
+                0.03366153500002156,
+                0.035485188999984985,
+                0.03484201700001677,
+                0.03407772200000636,
+                0.03484153699997705,
+                0.03555395600000111,
+                0.03472743499997932,
+                0.036438257000042995,
+                0.036934172000030685,
+                0.03665233600003148,
+                0.03636946899996474,
+                0.036092541999948935
             ]
         },
         "throughput": {
             "unit": "samples/s",
-            "value": 32.62604797053144
+            "value": 28.527636541382584
         },
         "energy": {
             "unit": "kWh",
-            "cpu": 1.1624875021915811e-06,
-            "ram": 4.858249665847651e-08,
+            "cpu": 1.165485576866499e-06,
+            "ram": 4.87078374442346e-08,
             "gpu": 0.0,
-            "total": 1.2110699988500574e-06
+            "total": 1.2141934143107336e-06
         },
         "efficiency": {
             "unit": "samples/kWh",
-            "value": 825716.1030737497
+            "value": 823592.014430151
         }
     }
 }
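The derived fields in the updated report are consistent with the raw latency samples: throughput equals count/total, and the efficiency value matches the reciprocal of the reported energy total (which suggests the energy fields are per-sample figures). A minimal sketch to re-derive them, assuming the new report is saved locally as benchmark_report.json (the file name is an assumption):

# Sketch: re-derive the summary fields from the raw latency samples.
# Assumes the updated report shown above is saved locally as "benchmark_report.json".
import json

with open("benchmark_report.json") as f:
    forward = json.load(f)["forward"]

latencies = forward["latency"]["values"]
count = len(latencies)      # 29 after this commit
total = sum(latencies)      # ~1.01656 s
mean = total / count        # ~0.035054 s
throughput = count / total  # ~28.53 samples/s, matches forward["throughput"]["value"]

# Efficiency matches 1 / the reported energy total (~823592 samples/kWh),
# which is why it is computed as a simple reciprocal here.
efficiency = 1.0 / forward["energy"]["total"]

print(count, round(mean, 6), round(throughput, 2), round(efficiency, 1))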