IlyasMoutawwakil committed on
Commit 4a04368
1 Parent(s): ace77bc

Upload cpu_inference_transformers_multiple-choice_FacebookAI/roberta-base/benchmark_report.json with huggingface_hub
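
For reference, a report file like this is typically pushed with huggingface_hub's upload_file. A minimal sketch, assuming a placeholder target repo and local path (neither is taken from this commit):

    # Minimal sketch of pushing a benchmark report with huggingface_hub.
    # repo_id and the local path are placeholders, not values from this commit.
    from huggingface_hub import HfApi

    api = HfApi()
    api.upload_file(
        path_or_fileobj="benchmark_report.json",  # local file to push
        path_in_repo="cpu_inference_transformers_multiple-choice_FacebookAI/roberta-base/benchmark_report.json",
        repo_id="<namespace>/<repo-name>",        # placeholder target repo
        repo_type="dataset",                      # assumption: benchmark results are stored in a dataset repo
        commit_message="Upload benchmark_report.json with huggingface_hub",
    )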

cpu_inference_transformers_multiple-choice_FacebookAI/roberta-base/benchmark_report.json CHANGED
@@ -2,7 +2,7 @@
     "forward": {
         "memory": {
             "unit": "MB",
-            "max_ram": 939.216896,
+            "max_ram": 940.347392,
             "max_global_vram": null,
             "max_process_vram": null,
             "max_reserved": null,
@@ -10,55 +10,52 @@
         },
         "latency": {
             "unit": "s",
-            "count": 24,
-            "total": 1.0141399070001285,
-            "mean": 0.04225582945833869,
-            "stdev": 0.0009135129726387775,
-            "p50": 0.0420419564999861,
-            "p90": 0.042964944400040395,
-            "p95": 0.04329109960001176,
-            "p99": 0.04527258731999893,
+            "count": 21,
+            "total": 1.0063083560000337,
+            "mean": 0.047919445523811124,
+            "stdev": 0.0008247789343517792,
+            "p50": 0.04781964699998298,
+            "p90": 0.04889871500000709,
+            "p95": 0.049182502000007844,
+            "p99": 0.04928782599997703,
             "values": [
-                0.04245597999999973,
-                0.04291501200003722,
-                0.04255576500003144,
-                0.042231933999971716,
-                0.04200119600000107,
-                0.0419489189999922,
-                0.04261303200001976,
-                0.04175353699997686,
-                0.04159689499999786,
-                0.042283009000016136,
-                0.04183869500002402,
-                0.04584839599999668,
-                0.042986344000041754,
-                0.04170609099998046,
-                0.04141746200002672,
-                0.04143089699999791,
-                0.041871124000010695,
-                0.04217910599999186,
-                0.042082716999971126,
-                0.041528578999987076,
-                0.04131163700003526,
-                0.04169398599998431,
-                0.04334488000000647,
-                0.042544714000030126
+                0.04931415699996933,
+                0.04781379600001401,
+                0.04889871500000709,
+                0.04678143699999282,
+                0.04877974300001142,
+                0.04781964699998298,
+                0.047574340999972264,
+                0.049182502000007844,
+                0.04599916199998688,
+                0.04818676000002142,
+                0.048317803999964326,
+                0.047561407000046074,
+                0.048549063999985265,
+                0.048445301000015206,
+                0.04696305499999198,
+                0.04688965799999778,
+                0.0484982890000083,
+                0.04803115100003197,
+                0.04774960800000372,
+                0.047748094999974455,
+                0.047204664000048524
             ]
         },
         "throughput": {
             "unit": "samples/s",
-            "value": 23.665373815130774
+            "value": 20.868354987603123
         },
         "energy": {
             "unit": "kWh",
-            "cpu": 1.5796790122985842e-06,
-            "ram": 6.601759091952318e-08,
+            "cpu": 1.5859182145860461e-06,
+            "ram": 6.627782124165832e-08,
             "gpu": 0.0,
-            "total": 1.6456966032181072e-06
+            "total": 1.6521960358277045e-06
         },
         "efficiency": {
             "unit": "samples/kWh",
-            "value": 607645.4177790317
+            "value": 605255.0534652673
         }
     }
 }
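
The derived fields track the measured ones: in this report, throughput.value matches the reciprocal of latency.mean and efficiency.value matches the reciprocal of energy.total. That relation is an observation from the numbers in this diff, not something documented here; a quick check against the new values:

    # Sanity check of the derived metrics in the new report (values copied from the diff above).
    mean_latency_s = 0.047919445523811124      # latency.mean
    total_energy_kwh = 1.6521960358277045e-06  # energy.total

    throughput = 1 / mean_latency_s    # ~20.868 samples/s, matches throughput.value
    efficiency = 1 / total_energy_kwh  # ~605255 samples/kWh, matches efficiency.value
    print(throughput, efficiency)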