{
"config": {
"name": "pytorch_generate",
"backend": {
"name": "pytorch",
"version": "2.4.0+cu121",
"_target_": "optimum_benchmark.backends.pytorch.backend.PyTorchBackend",
"task": "text-generation",
"library": "transformers",
"model_type": "gemma",
"model": "google/gemma-2b",
"processor": "google/gemma-2b",
"device": "cuda",
"device_ids": "0",
"seed": 42,
"inter_op_num_threads": null,
"intra_op_num_threads": null,
"model_kwargs": {},
"processor_kwargs": {},
"no_weights": true,
"device_map": null,
"torch_dtype": "float16",
"eval_mode": true,
"to_bettertransformer": false,
"low_cpu_mem_usage": null,
"attn_implementation": null,
"cache_implementation": "static",
"autocast_enabled": false,
"autocast_dtype": null,
"torch_compile": false,
"torch_compile_target": "forward",
"torch_compile_config": {
"backend": "inductor",
"mode": "reduce-overhead",
"fullgraph": true
},
"quantization_scheme": null,
"quantization_config": {},
"deepspeed_inference": false,
"deepspeed_inference_config": {},
"peft_type": null,
"peft_config": {}
},
"scenario": {
"name": "inference",
"_target_": "optimum_benchmark.scenarios.inference.scenario.InferenceScenario",
"iterations": 2,
"duration": 0,
"warmup_runs": 10,
"input_shapes": {
"batch_size": 1,
"num_choices": 2,
"sequence_length": 7
},
"new_tokens": null,
"memory": true,
"latency": true,
"energy": false,
"forward_kwargs": {},
"generate_kwargs": {
"max_new_tokens": 128,
"min_new_tokens": 128,
"do_sample": false
},
"call_kwargs": {}
},
"launcher": {
"name": "process",
"_target_": "optimum_benchmark.launchers.process.launcher.ProcessLauncher",
"device_isolation": true,
"device_isolation_action": "warn",
"numactl": false,
"numactl_kwargs": {},
"start_method": "spawn"
},
"environment": {
"cpu": " AMD EPYC 7R32",
"cpu_count": 16,
"cpu_ram_mb": 66697.261056,
"system": "Linux",
"machine": "x86_64",
"platform": "Linux-5.10.223-212.873.amzn2.x86_64-x86_64-with-glibc2.29",
"processor": "x86_64",
"python_version": "3.8.10",
"gpu": [
"NVIDIA A10G"
],
"gpu_count": 1,
"gpu_vram_mb": 24146608128,
"optimum_benchmark_version": "0.4.0",
"optimum_benchmark_commit": null,
"transformers_version": "4.45.0.dev0",
"transformers_commit": "2e3f8f74747deeeead6cf1f0c12cf01bd7169b82",
"accelerate_version": "0.34.0.dev0",
"accelerate_commit": null,
"diffusers_version": null,
"diffusers_commit": null,
"optimum_version": "1.22.0.dev0",
"optimum_commit": null,
"timm_version": "0.9.16",
"timm_commit": null,
"peft_version": "0.12.1.dev0",
"peft_commit": null
}
},
"report": {
"load": {
"memory": {
"unit": "MB",
"max_ram": 1320.886272,
"max_global_vram": 6768.033792,
"max_process_vram": 0.0,
"max_reserved": 6138.363904,
"max_allocated": 6060.931072
},
"latency": {
"unit": "s",
"count": 1,
"total": 12.100828125,
"mean": 12.100828125,
"stdev": 0.0,
"p50": 12.100828125,
"p90": 12.100828125,
"p95": 12.100828125,
"p99": 12.100828125,
"values": [
12.100828125
]
},
"throughput": null,
"energy": null,
"efficiency": null
},
"prefill": {
"memory": {
"unit": "MB",
"max_ram": 1764.220928,
"max_global_vram": 6789.005312,
"max_process_vram": 0.0,
"max_reserved": 6142.558208,
"max_allocated": 5028.450816
},
"latency": {
"unit": "s",
"count": 2,
"total": 0.04432889556884766,
"mean": 0.02216444778442383,
"stdev": 0.0005190086364746105,
"p50": 0.02216444778442383,
"p90": 0.022579654693603516,
"p95": 0.02263155555725098,
"p99": 0.022673076248168948,
"values": [
0.021645439147949218,
0.02268345642089844
]
},
"throughput": {
"unit": "tokens/s",
"value": 315.8210873595183
},
"energy": null,
"efficiency": null
},
"decode": {
"memory": {
"unit": "MB",
"max_ram": 1788.575744,
"max_global_vram": 6793.199616,
"max_process_vram": 0.0,
"max_reserved": 6146.752512,
"max_allocated": 5031.820288
},
"latency": {
"unit": "s",
"count": 2,
"total": 5.182965576171875,
"mean": 2.5914827880859375,
"stdev": 0.007712036132812461,
"p50": 2.5914827880859375,
"p90": 2.5976524169921875,
"p95": 2.5984236206054687,
"p99": 2.5990405834960937,
"values": [
2.583770751953125,
2.59919482421875
]
},
"throughput": {
"unit": "tokens/s",
"value": 49.00669245571254
},
"energy": null,
"efficiency": null
},
"per_token": {
"memory": null,
"latency": {
"unit": "s",
"count": 254,
"total": 5.182701566696171,
"mean": 0.020404336876756562,
"stdev": 0.0005085949116201209,
"p50": 0.020154848098754882,
"p90": 0.021154099273681642,
"p95": 0.021270783519744874,
"p99": 0.02152579065322876,
"values": [
0.020099071502685546,
0.01987583923339844,
0.01985740852355957,
0.02001408004760742,
0.019803136825561524,
0.020113407135009767,
0.02001203155517578,
0.020098047256469728,
0.01999667167663574,
0.019846176147460936,
0.01992803192138672,
0.02122854423522949,
0.021761024475097656,
0.020909055709838868,
0.02126335906982422,
0.020982784271240236,
0.021028863906860353,
0.019853311538696287,
0.02024345588684082,
0.020699296951293945,
0.020846431732177734,
0.02083328056335449,
0.020876287460327148,
0.021334016799926758,
0.021180416107177736,
0.02105855941772461,
0.021098495483398438,
0.020130815505981444,
0.02025369644165039,
0.02017791938781738,
0.020781055450439453,
0.020658176422119142,
0.02081177520751953,
0.021218303680419923,
0.020133888244628906,
0.020039680480957032,
0.02000383949279785,
0.021353471755981446,
0.02121625518798828,
0.02110873603820801,
0.021135360717773437,
0.020733951568603515,
0.020810752868652343,
0.020802560806274413,
0.02071049690246582,
0.02082908821105957,
0.020914176940917968,
0.021166080474853514,
0.02104319953918457,
0.02104319953918457,
0.02126540756225586,
0.021196800231933592,
0.02002841567993164,
0.02021990394592285,
0.020324352264404297,
0.02030080032348633,
0.020161535263061522,
0.01998028755187988,
0.019960832595825196,
0.020141056060791016,
0.02006425666809082,
0.020943872451782225,
0.020867071151733398,
0.02101043128967285,
0.02059878349304199,
0.02021887969970703,
0.02026188850402832,
0.020068351745605468,
0.02006630325317383,
0.01999564743041992,
0.019876863479614256,
0.02024140739440918,
0.020110336303710938,
0.02006118392944336,
0.019944543838500976,
0.019938207626342772,
0.02007961654663086,
0.01989232063293457,
0.019933088302612305,
0.02006937599182129,
0.019876863479614256,
0.019965951919555663,
0.019976192474365235,
0.01988403129577637,
0.019919872283935547,
0.020884479522705078,
0.02083737564086914,
0.02068377685546875,
0.019904512405395508,
0.019936256408691407,
0.020189184188842774,
0.020139007568359374,
0.020016128540039063,
0.01999564743041992,
0.02006118392944336,
0.02040934371948242,
0.020151296615600587,
0.020117504119873047,
0.020064384460449218,
0.019964799880981446,
0.01992192077636719,
0.019902463912963866,
0.01987993621826172,
0.020536319732666015,
0.020005983352661134,
0.01983990478515625,
0.019986431121826173,
0.019909631729125975,
0.0198287353515625,
0.020007936477661133,
0.020183040618896485,
0.019929088592529298,
0.020075519561767577,
0.01991372871398926,
0.01985651206970215,
0.02009280014038086,
0.019887104034423828,
0.02009718322753906,
0.020009824752807617,
0.019891199111938478,
0.019924032211303712,
0.020096960067749022,
0.02011136054992676,
0.02011238479614258,
0.02004991912841797,
0.019993600845336915,
0.020876287460327148,
0.020292703628540038,
0.019912607192993165,
0.019834911346435547,
0.020008928298950197,
0.019943424224853516,
0.019943424224853516,
0.019861503601074217,
0.020564992904663085,
0.020762624740600585,
0.021011455535888672,
0.021156864166259767,
0.02038374328613281,
0.020972543716430665,
0.020197376251220703,
0.020109312057495117,
0.02000383949279785,
0.020102144241333008,
0.02003763198852539,
0.020157440185546875,
0.019877887725830077,
0.020840448379516603,
0.021086208343505858,
0.020976640701293944,
0.0212807674407959,
0.020914176940917968,
0.021300224304199217,
0.021000192642211913,
0.021348352432250976,
0.02103193664550781,
0.021147647857666017,
0.020488191604614257,
0.020958208084106447,
0.021344255447387696,
0.021659648895263672,
0.021086208343505858,
0.02102579116821289,
0.020990976333618162,
0.02030899238586426,
0.020133888244628906,
0.02046156883239746,
0.02141798400878906,
0.021135360717773437,
0.021168224334716795,
0.021107616424560546,
0.02128179168701172,
0.02104319953918457,
0.020747264862060546,
0.020993024826049804,
0.02103500747680664,
0.021354496002197267,
0.020098047256469728,
0.020144128799438478,
0.019985408782958985,
0.019981311798095702,
0.02008678436279297,
0.02040115165710449,
0.020716543197631835,
0.020677631378173827,
0.02067353630065918,
0.020669439315795898,
0.021235712051391603,
0.01981644821166992,
0.019928064346313477,
0.019894271850585937,
0.019950592041015625,
0.020248575210571287,
0.019911680221557617,
0.02004684829711914,
0.02006118392944336,
0.020234304428100584,
0.02015225601196289,
0.01988198471069336,
0.019803136825561524,
0.019901504516601564,
0.020051904678344726,
0.019994623184204103,
0.019932159423828123,
0.021115903854370118,
0.020727807998657227,
0.02107904052734375,
0.021164031982421876,
0.019912704467773438,
0.020171775817871093,
0.020273151397705077,
0.01983795166015625,
0.02007142448425293,
0.02042367935180664,
0.021329919815063478,
0.021097471237182617,
0.020883455276489257,
0.021111808776855468,
0.021164031982421876,
0.021118976593017577,
0.02164735984802246,
0.020934656143188478,
0.02070742416381836,
0.021055391311645508,
0.02005504035949707,
0.02028339195251465,
0.020333696365356445,
0.020423551559448243,
0.02023423957824707,
0.02008166313171387,
0.02012774467468262,
0.02110054397583008,
0.020728832244873048,
0.020090879440307616,
0.01988915252685547,
0.019786752700805665,
0.020176895141601564,
0.019956735610961913,
0.01984921646118164,
0.01986764717102051,
0.019781631469726564,
0.020137983322143553,
0.02007347106933594,
0.01985536003112793,
0.019959808349609375,
0.01988915252685547,
0.020008031845092773,
0.020264863967895508,
0.02026700782775879,
0.019963903427124022,
0.0198922233581543,
0.019916799545288084,
0.019886240005493164,
0.020071264266967773
]
},
"throughput": {
"unit": "tokens/s",
"value": 49.00918888175886
},
"energy": null,
"efficiency": null
}
}
}