IlyasMoutawwakil committed
Commit 7b2ddf0
1 Parent(s): 9384214

Upload cpu_inference_transformers_text-classification_FacebookAI/roberta-base/benchmark_report.json with huggingface_hub

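The commit message above is the default one huggingface_hub attaches to programmatic uploads. As context, here is a minimal sketch of how a report like this can be pushed with `HfApi.upload_file`; the repo id, repo type, and token handling below are placeholders for illustration, not details taken from this commit:

```python
from huggingface_hub import HfApi

# Uses a token from `huggingface-cli login` or the HF_TOKEN environment variable.
api = HfApi()

# repo_id and repo_type are assumptions for illustration; only the file path and
# the auto-generated commit message are visible in this commit.
api.upload_file(
    path_or_fileobj="benchmark_report.json",
    path_in_repo="cpu_inference_transformers_text-classification_FacebookAI/roberta-base/benchmark_report.json",
    repo_id="<username>/<benchmark-results-repo>",
    repo_type="dataset",
)
```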
cpu_inference_transformers_text-classification_FacebookAI/roberta-base/benchmark_report.json CHANGED
@@ -2,7 +2,7 @@
     "forward": {
         "memory": {
             "unit": "MB",
-            "max_ram": 937.49248,
+            "max_ram": 936.808448,
             "max_global_vram": null,
             "max_process_vram": null,
             "max_reserved": null,
@@ -11,57 +11,57 @@
         "latency": {
             "unit": "s",
             "count": 27,
-            "total": 1.0255669960001228,
-            "mean": 0.03798396281481936,
-            "stdev": 0.0013657641120332562,
-            "p50": 0.03817636399998037,
-            "p90": 0.03896361340000567,
-            "p95": 0.03951649189999103,
-            "p99": 0.04031425351999132,
+            "total": 1.0110681230000296,
+            "mean": 0.037446967518519614,
+            "stdev": 0.000688873697815681,
+            "p50": 0.03757569100002911,
+            "p90": 0.03811274559999447,
+            "p95": 0.03818280149999964,
+            "p99": 0.038317894080009865,
             "values": [
-                0.038789833999999246,
-                0.037985176000006504,
-                0.03855480300001091,
-                0.03869269200004055,
-                0.03647314000005508,
-                0.03890146300000197,
-                0.03817636399998037,
-                0.03971348599998237,
-                0.038459595000006175,
-                0.03883157199999232,
-                0.03855275899996968,
-                0.03571738400000868,
-                0.038099398999975165,
-                0.03821538799996915,
-                0.03765985500001534,
-                0.03812939599998799,
-                0.03905683900001122,
-                0.03849970999999641,
-                0.037342201000001296,
-                0.038257356000030995,
-                0.037666808000039964,
-                0.03800090600003614,
-                0.03811510900004578,
-                0.03757702899997639,
-                0.03666334799999049,
-                0.04052533399999447,
-                0.03291004999999814
+                0.03812847099999317,
+                0.036572600999988936,
+                0.037022318999959225,
+                0.0373983699999485,
+                0.035084966999988865,
+                0.038102261999995335,
+                0.03739908200003583,
+                0.03702988300000243,
+                0.0368103839999776,
+                0.03757569100002911,
+                0.03631468000003224,
+                0.03701594699998623,
+                0.0376170079999838,
+                0.03805969300003653,
+                0.03794287499999882,
+                0.03766147999999703,
+                0.0376612800000089,
+                0.0375501329999679,
+                0.03729028800000833,
+                0.03783137099998157,
+                0.037796753000009176,
+                0.03835717800001248,
+                0.038087705000009464,
+                0.0374619789999997,
+                0.038206086000002415,
+                0.038016283000047224,
+                0.03707335400002876
             ]
         },
         "throughput": {
             "unit": "samples/s",
-            "value": 26.326900246697065
+            "value": 26.704432061299602
         },
         "energy": {
             "unit": "kWh",
-            "cpu": 1.2473264502154457e-06,
-            "ram": 5.212426219828536e-08,
+            "cpu": 1.2579312033977987e-06,
+            "ram": 5.257019776060419e-08,
             "gpu": 0.0,
-            "total": 1.299450712413731e-06
+            "total": 1.310501401158403e-06
         },
         "efficiency": {
             "unit": "samples/kWh",
-            "value": 769555.9288605098
+            "value": 763066.7156220216
         }
     }
 }
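The changed numbers are internally consistent: in both versions of the report, mean = total / count, the throughput in samples/s is the reciprocal of the mean latency, and the efficiency in samples/kWh is the reciprocal of the per-forward energy total. Below is a small sketch that rechecks those relationships from the uploaded JSON; the formulas are inferred from the numbers in this diff, not taken from the optimum-benchmark source:

```python
import json

# Path as it appears in this commit; adjust to wherever the repo is checked out.
path = "cpu_inference_transformers_text-classification_FacebookAI/roberta-base/benchmark_report.json"
with open(path) as f:
    forward = json.load(f)["forward"]

latency = forward["latency"]
mean = latency["total"] / latency["count"]             # 1.0110681... / 27 ≈ 0.037447 s
print(mean, latency["mean"])                           # both ≈ 0.037447

# Throughput is reported in samples/s and matches 1 / mean latency.
print(1 / mean, forward["throughput"]["value"])        # both ≈ 26.70

# Energy totals are per forward pass (kWh), so samples/kWh is the reciprocal.
print(1 / forward["energy"]["total"],
      forward["efficiency"]["value"])                  # both ≈ 763066.7
```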