benchmarks / pytorch-llama /4bit-gptq-exllama-v2.json
jerryzh168's picture
Upload pytorch-llama/4bit-gptq-exllama-v2.json with huggingface_hub
13c8ee2 verified
raw
history blame
21.1 kB
{
"config": {
"name": "pytorch-llama",
"backend": {
"name": "pytorch",
"version": "2.5.1",
"_target_": "optimum_benchmark.backends.pytorch.backend.PyTorchBackend",
"task": "text-generation",
"library": "transformers",
"model_type": "llama",
"model": "meta-llama/Llama-2-7b-chat-hf",
"processor": "meta-llama/Llama-2-7b-chat-hf",
"device": "cuda",
"device_ids": "0",
"seed": 42,
"inter_op_num_threads": null,
"intra_op_num_threads": null,
"model_kwargs": {},
"processor_kwargs": {},
"no_weights": true,
"device_map": null,
"torch_dtype": "float16",
"eval_mode": true,
"to_bettertransformer": false,
"low_cpu_mem_usage": null,
"attn_implementation": null,
"cache_implementation": null,
"autocast_enabled": false,
"autocast_dtype": null,
"torch_compile": false,
"torch_compile_target": "forward",
"torch_compile_config": {},
"quantization_scheme": "gptq",
"quantization_config": {
"bits": 4,
"use_exllama": true,
"version": 2,
"model_seqlen": 256
},
"deepspeed_inference": false,
"deepspeed_inference_config": {},
"peft_type": null,
"peft_config": {}
},
"scenario": {
"name": "inference",
"_target_": "optimum_benchmark.scenarios.inference.scenario.InferenceScenario",
"iterations": 10,
"duration": 10,
"warmup_runs": 10,
"input_shapes": {
"batch_size": 1,
"num_choices": 2,
"sequence_length": 128
},
"new_tokens": null,
"memory": true,
"latency": true,
"energy": false,
"forward_kwargs": {},
"generate_kwargs": {
"max_new_tokens": 32,
"min_new_tokens": 32
},
"call_kwargs": {}
},
"launcher": {
"name": "process",
"_target_": "optimum_benchmark.launchers.process.launcher.ProcessLauncher",
"device_isolation": true,
"device_isolation_action": "warn",
"numactl": false,
"numactl_kwargs": {},
"start_method": "spawn"
},
"environment": {
"cpu": " Intel(R) Xeon(R) Platinum 8339HC CPU @ 1.80GHz",
"cpu_count": 22,
"cpu_ram_mb": 189584.162816,
"system": "Linux",
"machine": "x86_64",
"platform": "Linux-5.19.0-0_fbk12_hardened_11583_g0bef9520ca2b-x86_64-with-glibc2.34",
"processor": "x86_64",
"python_version": "3.10.15",
"gpu": [
"NVIDIA PG509-210"
],
"gpu_count": 1,
"gpu_vram_mb": 85899345920,
"optimum_benchmark_version": "0.5.0.dev0",
"optimum_benchmark_commit": "78d7da546ad060a586060d0d3680bec33f32a45b",
"transformers_version": "4.47.0.dev0",
"transformers_commit": null,
"accelerate_version": "1.1.0",
"accelerate_commit": null,
"diffusers_version": "0.31.0",
"diffusers_commit": null,
"optimum_version": "1.24.0.dev0",
"optimum_commit": null,
"timm_version": "1.0.11",
"timm_commit": null,
"peft_version": "0.13.2",
"peft_commit": null
},
"print_report": true,
"log_report": true
},
"report": {
"load": {
"memory": {
"unit": "MB",
"max_ram": 6375.051264,
"max_global_vram": 6294.470656,
"max_process_vram": 5381.292032,
"max_reserved": 4303.355904,
"max_allocated": 4034.388992
},
"latency": {
"unit": "s",
"values": [
13.85648046875
],
"count": 1,
"total": 13.85648046875,
"mean": 13.85648046875,
"p50": 13.85648046875,
"p90": 13.85648046875,
"p95": 13.85648046875,
"p99": 13.85648046875,
"stdev": 0,
"stdev_": 0
},
"throughput": null,
"energy": null,
"efficiency": null
},
"prefill": {
"memory": {
"unit": "MB",
"max_ram": 6607.847424,
"max_global_vram": 6663.569408,
"max_process_vram": 5750.390784,
"max_reserved": 4401.922048,
"max_allocated": 4122.738688
},
"latency": {
"unit": "s",
"values": [
0.04380115127563477,
0.044065792083740236,
0.04284396743774414,
0.045498081207275394,
0.04559075164794922,
0.045726974487304686,
0.04463980865478516,
0.043799774169921875,
0.04394268798828125,
0.04424748611450195
],
"count": 10,
"total": 0.44415647506713873,
"mean": 0.044415647506713875,
"p50": 0.0441566390991211,
"p90": 0.04560437393188477,
"p95": 0.04566567420959473,
"p99": 0.04571471443176269,
"stdev": 0.0008901578921887393,
"stdev_": 2.0041538110058688
},
"throughput": {
"unit": "tokens/s",
"value": 2881.867251415202
},
"energy": null,
"efficiency": null
},
"decode": {
"memory": {
"unit": "MB",
"max_ram": 6607.847424,
"max_global_vram": 6663.569408,
"max_process_vram": 5750.390784,
"max_reserved": 4401.922048,
"max_allocated": 4137.581568
},
"latency": {
"unit": "s",
"values": [
1.150227783203125,
1.1435328369140625,
1.144312744140625,
1.190687255859375,
1.213025146484375,
1.18650830078125,
1.1643739013671874,
1.1702857666015625,
1.171079345703125,
1.1755086669921875
],
"count": 10,
"total": 11.709541748046878,
"mean": 1.1709541748046877,
"p50": 1.1706825561523437,
"p90": 1.192921044921875,
"p95": 1.202973095703125,
"p99": 1.2110147363281252,
"stdev": 0.02087361186790158,
"stdev_": 1.782615606745093
},
"throughput": {
"unit": "tokens/s",
"value": 26.474135937190475
},
"energy": null,
"efficiency": null
},
"per_token": {
"memory": null,
"latency": {
"unit": "s",
"values": [
0.03704742431640625,
0.03801862335205078,
0.03683036804199219,
0.03736947250366211,
0.037531902313232425,
0.0362281608581543,
0.036543422698974606,
0.03659683227539062,
0.03652758407592773,
0.035843486785888674,
0.03605881500244141,
0.03645119857788086,
0.036394336700439456,
0.03593667221069336,
0.038279327392578125,
0.03829715347290039,
0.035945022583007816,
0.03586348724365234,
0.03625321578979492,
0.03735884857177734,
0.03703507232666016,
0.0374218864440918,
0.03696966552734375,
0.038542240142822266,
0.03727628707885742,
0.03686703872680664,
0.03680438232421875,
0.03810559844970703,
0.039213054656982424,
0.03820521545410156,
0.03734912109375,
0.03707907104492188,
0.03696028900146484,
0.03681977462768555,
0.0366451530456543,
0.03587907028198242,
0.03661350250244141,
0.036641281127929685,
0.03638809585571289,
0.036821056365966796,
0.03863504028320312,
0.038028961181640626,
0.037765953063964845,
0.03720204925537109,
0.036520767211914065,
0.0372674560546875,
0.0366354866027832,
0.03651372909545898,
0.03662031936645508,
0.036466751098632816,
0.036754528045654294,
0.037453887939453125,
0.03774105453491211,
0.037873600006103514,
0.03758505630493164,
0.03643840026855469,
0.036547679901123044,
0.03644598388671875,
0.03646492767333984,
0.03608649444580078,
0.0359411506652832,
0.03592134475708008,
0.03652691268920898,
0.035943489074707034,
0.037416481018066404,
0.03776649475097656,
0.03933123016357422,
0.038307968139648436,
0.037603839874267575,
0.03676652908325195,
0.038499393463134766,
0.037764606475830076,
0.03725721740722656,
0.03790572738647461,
0.03670614242553711,
0.03616582489013672,
0.036733665466308595,
0.0370786247253418,
0.03708268737792969,
0.03802550506591797,
0.03842115020751953,
0.036956897735595705,
0.03582921600341797,
0.03581209564208984,
0.03574800109863281,
0.036472896575927734,
0.035612449645996094,
0.03564271926879883,
0.03549161529541016,
0.03605599975585937,
0.035842655181884765,
0.03591231918334961,
0.03668409729003906,
0.03751772689819336,
0.03773062515258789,
0.037615711212158204,
0.03905785751342773,
0.038182689666748044,
0.03779929733276367,
0.037969982147216794,
0.038040191650390624,
0.0374323501586914,
0.03721731185913086,
0.03748988723754883,
0.03724009704589844,
0.03739788818359375,
0.03840438461303711,
0.0380200309753418,
0.03875526428222656,
0.038402782440185544,
0.03946188735961914,
0.038712478637695315,
0.03806233596801758,
0.038259040832519534,
0.03925459289550781,
0.038986305236816406,
0.038868545532226566,
0.03813158416748047,
0.040268638610839846,
0.03865705490112305,
0.03819065475463867,
0.03994492721557617,
0.03960185623168945,
0.03899526214599609,
0.03987353515625,
0.03912857437133789,
0.03919475173950195,
0.040785438537597654,
0.0394859504699707,
0.03977878570556641,
0.039401409149169925,
0.039279808044433595,
0.03918147277832031,
0.03891398239135742,
0.03834908676147461,
0.03938937759399414,
0.039163936614990236,
0.038878623962402346,
0.03871548843383789,
0.03902934265136719,
0.038719230651855466,
0.03899795150756836,
0.0389073600769043,
0.03945587158203125,
0.0393889274597168,
0.038677566528320315,
0.03829119873046875,
0.039578689575195315,
0.03933651351928711,
0.03875030517578125,
0.0388996467590332,
0.038635902404785157,
0.038656543731689454,
0.03874214553833008,
0.03839907073974609,
0.038819808959960934,
0.037350814819335935,
0.03864905548095703,
0.03935408020019531,
0.03941791915893555,
0.03919753646850586,
0.03815875244140625,
0.03823785781860352,
0.038115615844726565,
0.03750201416015625,
0.03737737655639648,
0.0380462417602539,
0.039334304809570314,
0.03817583847045898,
0.038138622283935546,
0.03928931045532227,
0.03857622528076172,
0.038378814697265624,
0.03889503860473633,
0.038693729400634765,
0.03744009780883789,
0.03821750259399414,
0.037451614379882814,
0.03876931381225586,
0.036935489654541014,
0.037678974151611325,
0.03831856155395508,
0.03786636734008789,
0.03664931106567383,
0.0383930549621582,
0.038075359344482425,
0.03762172698974609,
0.03747430419921875,
0.03811830520629883,
0.03753548812866211,
0.036760478973388674,
0.03783712005615234,
0.03858419036865234,
0.03797046279907226,
0.03668038558959961,
0.035972545623779294,
0.03712803268432617,
0.03942684936523438,
0.03768000030517578,
0.037852001190185544,
0.03769347381591797,
0.037981056213378904,
0.03730089569091797,
0.03695516967773437,
0.03803424072265625,
0.03757382583618164,
0.037867328643798825,
0.03795862579345703,
0.03830137634277344,
0.038239070892333984,
0.038098495483398435,
0.037338977813720704,
0.03693308639526367,
0.03595008087158203,
0.03611049652099609,
0.03732265472412109,
0.03708911895751953,
0.038954689025878904,
0.03857740783691406,
0.03857612609863281,
0.03817001724243164,
0.03862220764160156,
0.038685791015625,
0.037855232238769534,
0.03797929763793945,
0.03812217712402344,
0.03757644653320313,
0.037375297546386715,
0.036920673370361326,
0.03679331207275391,
0.03756044769287109,
0.03670236968994141,
0.0362872314453125,
0.03632060623168945,
0.03667324829101563,
0.03764944076538086,
0.03774982452392578,
0.03793164825439453,
0.03762361526489258,
0.0372217903137207,
0.037382080078125,
0.03862550354003906,
0.03826480102539063,
0.03809439849853516,
0.038249534606933595,
0.03844358444213867,
0.037539615631103515,
0.03681257629394531,
0.03787974548339844,
0.036924800872802734,
0.03736979293823242,
0.038113086700439454,
0.0370885124206543,
0.03772537612915039,
0.038632320404052733,
0.038101696014404295,
0.037794174194335936,
0.03836051177978515,
0.03727360153198242,
0.038094463348388674,
0.03678287887573242,
0.036885280609130856,
0.03773196792602539,
0.03762812805175781,
0.03745171356201172,
0.037923969268798825,
0.038128063201904296,
0.03844169616699219,
0.038367870330810544,
0.03920854568481445,
0.038261280059814454,
0.038267841339111326,
0.03837958526611328,
0.03797315216064453,
0.03796073532104492,
0.037338592529296874,
0.03718870544433594,
0.03648582458496094,
0.03638083267211914,
0.03782160186767578,
0.038430015563964845,
0.038142528533935544,
0.03849155044555664,
0.03668576049804687,
0.037329662322998045,
0.03640796661376953,
0.037342048645019534,
0.037889888763427734,
0.037280895233154296,
0.03743507385253906,
0.03669216156005859,
0.03592975997924805,
0.037109310150146485,
0.03768560028076172,
0.03863158416748047,
0.03851126480102539,
0.03792326354980469,
0.0385654411315918,
0.03867942428588867,
0.03905379104614258,
0.03907219314575195,
0.03866819381713867,
0.03833721542358398,
0.03866239929199219,
0.03859321594238281,
0.037945121765136716,
0.03818454360961914,
0.03740816116333008,
0.03818662261962891,
0.037453407287597655
],
"count": 310,
"total": 11.699872890472411,
"mean": 0.03774152545313682,
"p50": 0.03781044960021973,
"p90": 0.03905419769287109,
"p95": 0.03934617519378662,
"p99": 0.03986500770568848,
"stdev": 0.0010086151854723352,
"stdev_": 2.6724282427977646
},
"throughput": {
"unit": "tokens/s",
"value": 26.4960143500741
},
"energy": null,
"efficiency": null
}
}
}