IlyasMoutawwakil committed (verified)
Commit 3019049 · Parent(s): b916de4

Upload cuda_inference_transformers_token-classification_microsoft/deberta-v3-base/benchmark_report.json with huggingface_hub
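The commit message indicates the report was pushed with the huggingface_hub client. Below is a minimal sketch of such an upload; the repo_id, repo_type, and local file path are placeholders for illustration only, since the target repository is not shown on this page.

```python
# Sketch of uploading a benchmark report with huggingface_hub.
# repo_id, repo_type, and the local path are assumptions, not taken from this commit.
from huggingface_hub import HfApi

api = HfApi()
api.upload_file(
    path_or_fileobj="benchmark_report.json",  # local JSON produced by the benchmark run
    path_in_repo=(
        "cuda_inference_transformers_token-classification_"
        "microsoft/deberta-v3-base/benchmark_report.json"
    ),
    repo_id="<user>/<benchmark-results-repo>",  # placeholder repository
    repo_type="dataset",  # assumption: benchmark results are commonly stored in a dataset repo
    commit_message=(
        "Upload cuda_inference_transformers_token-classification_"
        "microsoft/deberta-v3-base/benchmark_report.json with huggingface_hub"
    ),
)
```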
cuda_inference_transformers_token-classification_microsoft/deberta-v3-base/benchmark_report.json CHANGED
@@ -2,108 +2,105 @@
     "forward": {
         "memory": {
             "unit": "MB",
-            "max_ram": 1027.239936,
-            "max_global_vram": 2103.209984,
-            "max_process_vram": 289458.167808,
+            "max_ram": 1026.801664,
+            "max_global_vram": 2103.226368,
+            "max_process_vram": 288477.220864,
             "max_reserved": 773.849088,
             "max_allocated": 745.087488
         },
         "latency": {
             "unit": "s",
-            "count": 80,
-            "total": 1.0052957162857055,
-            "mean": 0.012566196453571321,
-            "stdev": 0.0006625729655892718,
-            "p50": 0.012202911853790284,
-            "p90": 0.013340913963317871,
-            "p95": 0.013399683046340942,
-            "p99": 0.014362688531875594,
+            "count": 77,
+            "total": 0.9986726512908934,
+            "mean": 0.012969774692089526,
+            "stdev": 0.00030675520486909714,
+            "p50": 0.01294227123260498,
+            "p90": 0.013350208282470704,
+            "p95": 0.013439296340942384,
+            "p99": 0.01400096179962158,
             "values": [
-                0.012973154067993164,
-                0.013207714080810546,
-                0.013374594688415528,
-                0.01344467544555664,
-                0.0133925142288208,
-                0.013397315025329589,
-                0.013371874809265136,
-                0.01333747386932373,
-                0.013068353652954102,
-                0.012853632926940918,
-                0.012794592857360839,
-                0.01296499252319336,
-                0.013110114097595214,
-                0.01329859447479248,
-                0.01288851261138916,
-                0.013084994316101075,
-                0.013012033462524414,
-                0.013114433288574218,
-                0.012974753379821777,
-                0.013265633583068847,
-                0.012972673416137696,
-                0.013068513870239258,
-                0.012804512977600097,
-                0.01641252326965332,
-                0.012443072319030761,
-                0.012344191551208496,
-                0.012093791007995605,
-                0.01215715217590332,
-                0.012088830947875977,
-                0.012147231101989746,
-                0.012369631767272949,
-                0.012226911544799804,
-                0.01211747169494629,
-                0.012100510597229004,
-                0.012129791259765625,
-                0.012191551208496094,
-                0.01208051109313965,
-                0.012076830863952637,
-                0.012138751029968262,
-                0.012081951141357421,
-                0.01236899185180664,
-                0.012155232429504394,
-                0.012077630996704102,
-                0.012095231056213379,
-                0.012163552284240722,
-                0.012072351455688477,
-                0.012065311431884766,
-                0.012145471572875977,
-                0.012082430839538574,
-                0.012077791213989258,
-                0.012136192321777343,
-                0.012068031311035156,
-                0.012052511215209961,
-                0.012146910667419434,
-                0.01208403205871582,
-                0.012060030937194825,
-                0.012184990882873534,
-                0.012086112022399903,
-                0.012099870681762695,
-                0.012276991844177246,
-                0.012069630622863769,
-                0.012191072463989257,
-                0.012101791381835937,
-                0.012482272148132325,
-                0.013817795753479004,
-                0.013764036178588868,
-                0.012948192596435548,
-                0.012761153221130372,
-                0.012851552963256836,
-                0.012745633125305175,
-                0.012795232772827148,
-                0.012386272430419922,
-                0.012212671279907227,
-                0.012087870597839355,
-                0.012073951721191406,
-                0.012073631286621095,
-                0.012076670646667481,
-                0.012083390235900878,
-                0.01219315242767334,
-                0.012103711128234863
+                0.012647710800170898,
+                0.013074432373046875,
+                0.013316351890563965,
+                0.013425632476806641,
+                0.013324031829833985,
+                0.013127872467041016,
+                0.012998591423034668,
+                0.01300147247314453,
+                0.012982590675354004,
+                0.012844831466674804,
+                0.013051551818847656,
+                0.012946270942687988,
+                0.013152671813964843,
+                0.012912350654602051,
+                0.013049792289733887,
+                0.012929150581359863,
+                0.013166912078857422,
+                0.01303059196472168,
+                0.013289312362670898,
+                0.013069791793823242,
+                0.013173952102661132,
+                0.013092511177062989,
+                0.013133951187133789,
+                0.012987071990966798,
+                0.012984991073608398,
+                0.012697630882263184,
+                0.012764830589294434,
+                0.012652669906616211,
+                0.012632830619812012,
+                0.012910270690917968,
+                0.012620031356811523,
+                0.01271907138824463,
+                0.012699070930480958,
+                0.013061791419982911,
+                0.012818111419677734,
+                0.012775071144104004,
+                0.012700671195983887,
+                0.013063232421875,
+                0.01273331069946289,
+                0.012779390335083008,
+                0.012771550178527833,
+                0.013034111976623535,
+                0.012964990615844727,
+                0.012762431144714356,
+                0.012769631385803223,
+                0.012823711395263672,
+                0.012771711349487305,
+                0.013150752067565918,
+                0.012700190544128418,
+                0.012708029747009277,
+                0.012681790351867676,
+                0.012652030944824219,
+                0.012672510147094727,
+                0.013062110900878906,
+                0.012631071090698243,
+                0.01263523006439209,
+                0.012656670570373535,
+                0.012639869689941406,
+                0.012704509735107422,
+                0.013057632446289063,
+                0.01278307056427002,
+                0.013493951797485351,
+                0.013202911376953124,
+                0.013425151824951171,
+                0.013512673377990723,
+                0.01434083366394043,
+                0.013893633842468263,
+                0.013313152313232423,
+                0.01338947296142578,
+                0.013422431945800782,
+                0.01294227123260498,
+                0.012894750595092774,
+                0.012751550674438476,
+                0.01269363021850586,
+                0.01298419189453125,
+                0.012726750373840332,
+                0.012709309577941894
             ]
         },
         "throughput": {
             "unit": "samples/s",
-            "value": 79.57857444730615
+            "value": 77.10234169371624
         },
         "energy": null,
         "efficiency": null