IlyasMoutawwakil (HF staff) committed
Commit 424c1d2
1 parent: e536349

Upload cpu_inference_transformers_text-classification_FacebookAI/roberta-base/benchmark_report.json with huggingface_hub

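For context, a report like this is normally pushed with huggingface_hub's upload_file, which also sets the commit message shown above. A minimal sketch, not the exact call behind this commit; the local path, repo_id, and repo_type below are assumptions for illustration:

from huggingface_hub import HfApi

api = HfApi()
api.upload_file(
    path_or_fileobj="benchmark_report.json",  # local file; assumed path
    path_in_repo="cpu_inference_transformers_text-classification_FacebookAI/roberta-base/benchmark_report.json",
    repo_id="user/benchmarks",   # hypothetical target repo
    repo_type="dataset",         # assumption: benchmark reports are often stored in a dataset repo
    commit_message="Upload cpu_inference_transformers_text-classification_FacebookAI/roberta-base/benchmark_report.json with huggingface_hub",
)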
cpu_inference_transformers_text-classification_FacebookAI/roberta-base/benchmark_report.json CHANGED
@@ -2,7 +2,7 @@
     "forward": {
         "memory": {
             "unit": "MB",
-            "max_ram": 939.061248,
+            "max_ram": 942.96064,
             "max_global_vram": null,
             "max_process_vram": null,
             "max_reserved": null,
@@ -10,58 +10,57 @@
         },
         "latency": {
             "unit": "s",
-            "count": 27,
-            "total": 1.0046774289998552,
-            "mean": 0.037210275148142784,
-            "stdev": 0.002973652806825925,
-            "p50": 0.03872174299999642,
-            "p90": 0.03994491459997107,
-            "p95": 0.0401707419999866,
-            "p99": 0.040761405499987406,
+            "count": 26,
+            "total": 1.0184859130001769,
+            "mean": 0.03917253511539142,
+            "stdev": 0.0014329893020697767,
+            "p50": 0.03929683799998429,
+            "p90": 0.04072265549999088,
+            "p95": 0.04214556700001992,
+            "p99": 0.042363420249984074,
             "values": [
-                0.040256817999988925,
-                0.03878219799997851,
-                0.03714330799999743,
-                0.03932511099998237,
-                0.03824359199995797,
-                0.039282150000019556,
-                0.03872174299999642,
-                0.039969897999981185,
-                0.04093869299998687,
-                0.039546208999979626,
-                0.03858397400000513,
-                0.03992825899996433,
-                0.03932582199996659,
-                0.038750848999995924,
-                0.038841681000008066,
-                0.03933889699999327,
-                0.039409901999988506,
-                0.03835199700000658,
-                0.037515691000010065,
-                0.03340292699999736,
-                0.03348249800001213,
-                0.033424458000013146,
-                0.03309147100003429,
-                0.03203313500000604,
-                0.032101354000019455,
-                0.032825498999955016,
-                0.03205929500001048
+                0.03942920000002914,
+                0.03924314300002152,
+                0.03674496700000418,
+                0.03995215399999097,
+                0.0395850209999935,
+                0.042363037000029635,
+                0.03939356400002225,
+                0.0398444740000059,
+                0.039379225999994105,
+                0.037841882000009264,
+                0.03578465799995456,
+                0.04149315699999079,
+                0.04236354799996889,
+                0.03885950900001944,
+                0.03852791000002753,
+                0.03928008200000477,
+                0.03711653000004844,
+                0.039855545000023085,
+                0.0385356050000496,
+                0.038527058999989094,
+                0.03953612000003659,
+                0.03939455500000122,
+                0.039313593999963814,
+                0.03854181700000936,
+                0.038492985000004865,
+                0.03908657099998436
             ]
         },
         "throughput": {
             "unit": "samples/s",
-            "value": 26.874297382074353
+            "value": 25.528089950121366
         },
         "energy": {
             "unit": "kWh",
-            "cpu": 1.3751970853842083e-06,
-            "ram": 5.7470542370216706e-08,
+            "cpu": 1.343258176270116e-06,
+            "ram": 5.613570432258491e-08,
             "gpu": 0.0,
-            "total": 1.432667627754425e-06
+            "total": 1.399393880592701e-06
         },
         "efficiency": {
             "unit": "samples/kWh",
-            "value": 697998.6010903368
+            "value": 714595.0928243725
         }
     }
 }
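The aggregate fields are consistent with the raw "values" list: mean = total / count, throughput = count / total, and, for the numbers above, efficiency = 1 / energy.total; the reported stdev matches the population standard deviation. A minimal sketch that recomputes them from the new report, assuming the file is saved locally as benchmark_report.json:

import json
import statistics

# Load the benchmark report (local file name is an assumption).
with open("benchmark_report.json") as f:
    report = json.load(f)

forward = report["forward"]
values = forward["latency"]["values"]

count = len(values)                 # 26 after this commit
total = sum(values)                 # ~1.01849 s
mean = total / count                # ~0.0391725 s, matches "latency.mean"
stdev = statistics.pstdev(values)   # population stdev, matches "latency.stdev"

throughput = count / total                     # ~25.528 samples/s, matches "throughput.value"
efficiency = 1.0 / forward["energy"]["total"]  # samples/kWh, matches "efficiency.value"

print(f"count={count} mean={mean:.6f}s stdev={stdev:.6f}s "
      f"throughput={throughput:.2f} samples/s efficiency={efficiency:.0f} samples/kWh")

Percentile estimators differ between libraries, so p50/p90/p95/p99 may not reproduce exactly with every method, though the reported p50 equals the average of the two middle sorted values here.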