{
    "name": "cuda_inference_transformers_multiple-choice_FacebookAI/roberta-base",
    "backend": {
        "name": "pytorch",
        "version": "2.3.1+rocm5.7",
        "_target_": "optimum_benchmark.backends.pytorch.backend.PyTorchBackend",
        "task": "multiple-choice",
        "library": "transformers",
        "model_type": "roberta",
        "model": "FacebookAI/roberta-base",
        "processor": "FacebookAI/roberta-base",
        "device": "cuda",
        "device_ids": "6",
        "seed": 42,
        "inter_op_num_threads": null,
        "intra_op_num_threads": null,
        "model_kwargs": {},
        "processor_kwargs": {},
        "no_weights": true,
        "device_map": null,
        "torch_dtype": null,
        "eval_mode": true,
        "to_bettertransformer": false,
        "low_cpu_mem_usage": null,
        "attn_implementation": null,
        "cache_implementation": null,
        "autocast_enabled": false,
        "autocast_dtype": null,
        "torch_compile": false,
        "torch_compile_target": "forward",
        "torch_compile_config": {},
        "quantization_scheme": null,
        "quantization_config": {},
        "deepspeed_inference": false,
        "deepspeed_inference_config": {},
        "peft_type": null,
        "peft_config": {}
    },
    "scenario": {
        "name": "inference",
        "_target_": "optimum_benchmark.scenarios.inference.scenario.InferenceScenario",
        "iterations": 1,
        "duration": 1,
        "warmup_runs": 1,
        "input_shapes": {
            "batch_size": 1,
            "num_choices": 2,
            "sequence_length": 2
        },
        "new_tokens": null,
        "memory": true,
        "latency": true,
        "energy": false,
        "forward_kwargs": {},
        "generate_kwargs": {
            "max_new_tokens": 2,
            "min_new_tokens": 2
        },
        "call_kwargs": {
            "num_inference_steps": 2
        }
    },
    "launcher": {
        "name": "process",
        "_target_": "optimum_benchmark.launchers.process.launcher.ProcessLauncher",
        "device_isolation": true,
        "device_isolation_action": "warn",
        "numactl": false,
        "numactl_kwargs": {},
        "start_method": "spawn"
    },
    "environment": {
        "cpu": " AMD EPYC 7763 64-Core Processor",
        "cpu_count": 128,
        "cpu_ram_mb": 1082014.482432,
        "system": "Linux",
        "machine": "x86_64",
        "platform": "Linux-5.15.0-122-generic-x86_64-with-glibc2.35",
        "processor": "x86_64",
        "python_version": "3.10.12",
        "gpu": [
            "Advanced Micro Devices, Inc. [AMD/ATI]",
            "Advanced Micro Devices, Inc. [AMD/ATI]",
            "Advanced Micro Devices, Inc. [AMD/ATI]",
            "Advanced Micro Devices, Inc. [AMD/ATI]",
            "Advanced Micro Devices, Inc. [AMD/ATI]",
            "Advanced Micro Devices, Inc. [AMD/ATI]",
            "Advanced Micro Devices, Inc. [AMD/ATI]",
            "Advanced Micro Devices, Inc. [AMD/ATI]"
        ],
        "gpu_count": 8,
        "gpu_vram_mb": 549621596160,
        "optimum_benchmark_version": "0.5.0",
        "optimum_benchmark_commit": null,
        "transformers_version": "4.45.1",
        "transformers_commit": null,
        "accelerate_version": "0.34.2",
        "accelerate_commit": null,
        "diffusers_version": "0.30.3",
        "diffusers_commit": null,
        "optimum_version": null,
        "optimum_commit": null,
        "timm_version": "1.0.9",
        "timm_commit": null,
        "peft_version": "0.13.0",
        "peft_commit": null
    }
}
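
The JSON above is the serialized configuration of an optimum-benchmark run (version 0.5.0, PyTorch 2.3.1+rocm5.7). The sketch below shows how an equivalent run could be launched from Python; it is a minimal example assuming optimum-benchmark's documented API (Benchmark.launch with PyTorchConfig, InferenceConfig and ProcessConfig), with constructor arguments mirroring the fields serialized above. Exact argument names should be checked against the installed version.

from optimum_benchmark import (
    Benchmark,
    BenchmarkConfig,
    InferenceConfig,
    ProcessConfig,
    PyTorchConfig,
)
from optimum_benchmark.logging_utils import setup_logging

setup_logging(level="INFO")

if __name__ == "__main__":
    # Backend: PyTorch on a single GPU (device index 6), randomly initialized
    # weights (no_weights=True) so the model does not need to be downloaded.
    backend_config = PyTorchConfig(
        model="FacebookAI/roberta-base",
        task="multiple-choice",
        device="cuda",
        device_ids="6",
        seed=42,
        no_weights=True,
    )
    # Scenario: short inference run measuring latency and memory (energy off),
    # with the tiny input shapes recorded in the config above.
    scenario_config = InferenceConfig(
        iterations=1,
        duration=1,
        warmup_runs=1,
        latency=True,
        memory=True,
        energy=False,
        input_shapes={"batch_size": 1, "num_choices": 2, "sequence_length": 2},
    )
    # Launcher: run the benchmark in a spawned, device-isolated process and
    # warn if other processes are found using the same device.
    launcher_config = ProcessConfig(
        device_isolation=True,
        device_isolation_action="warn",
        start_method="spawn",
    )
    benchmark_config = BenchmarkConfig(
        name="cuda_inference_transformers_multiple-choice_FacebookAI/roberta-base",
        backend=backend_config,
        scenario=scenario_config,
        launcher=launcher_config,
    )
    benchmark_report = Benchmark.launch(benchmark_config)
    benchmark_report.log()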