{
    "name": "cuda_inference_transformers_text-classification_FacebookAI/roberta-base",
    "backend": {
        "name": "pytorch",
        "version": "2.3.1+rocm5.7",
        "_target_": "optimum_benchmark.backends.pytorch.backend.PyTorchBackend",
        "task": "text-classification",
        "library": "transformers",
        "model_type": "roberta",
        "model": "FacebookAI/roberta-base",
        "processor": "FacebookAI/roberta-base",
        "device": "cuda",
        "device_ids": "5",
        "seed": 42,
        "inter_op_num_threads": null,
        "intra_op_num_threads": null,
        "model_kwargs": {},
        "processor_kwargs": {},
        "no_weights": true,
        "device_map": null,
        "torch_dtype": null,
        "eval_mode": true,
        "to_bettertransformer": false,
        "low_cpu_mem_usage": null,
        "attn_implementation": null,
        "cache_implementation": null,
        "autocast_enabled": false,
        "autocast_dtype": null,
        "torch_compile": false,
        "torch_compile_target": "forward",
        "torch_compile_config": {},
        "quantization_scheme": null,
        "quantization_config": {},
        "deepspeed_inference": false,
        "deepspeed_inference_config": {},
        "peft_type": null,
        "peft_config": {}
    },
    "scenario": {
        "name": "inference",
        "_target_": "optimum_benchmark.scenarios.inference.scenario.InferenceScenario",
        "iterations": 1,
        "duration": 1,
        "warmup_runs": 1,
        "input_shapes": {
            "batch_size": 2,
            "sequence_length": 16,
            "num_choices": 2
        },
        "new_tokens": null,
        "memory": true,
        "latency": true,
        "energy": false,
        "forward_kwargs": {},
        "generate_kwargs": {
            "max_new_tokens": 2,
            "min_new_tokens": 2
        },
        "call_kwargs": {
            "num_inference_steps": 2
        }
    },
    "launcher": {
        "name": "process",
        "_target_": "optimum_benchmark.launchers.process.launcher.ProcessLauncher",
        "device_isolation": true,
        "device_isolation_action": "warn",
        "numactl": false,
        "numactl_kwargs": {},
        "start_method": "spawn"
    },
    "environment": {
        "cpu": " AMD EPYC 7763 64-Core Processor",
        "cpu_count": 128,
        "cpu_ram_mb": 1082014.490624,
        "system": "Linux",
        "machine": "x86_64",
        "platform": "Linux-5.15.0-122-generic-x86_64-with-glibc2.35",
        "processor": "x86_64",
        "python_version": "3.10.12",
        "gpu": [
            "Advanced Micro Devices, Inc. [AMD/ATI]",
            "Advanced Micro Devices, Inc. [AMD/ATI]",
            "Advanced Micro Devices, Inc. [AMD/ATI]",
            "Advanced Micro Devices, Inc. [AMD/ATI]",
            "Advanced Micro Devices, Inc. [AMD/ATI]",
            "Advanced Micro Devices, Inc. [AMD/ATI]",
            "Advanced Micro Devices, Inc. [AMD/ATI]",
            "Advanced Micro Devices, Inc. [AMD/ATI]"
        ],
        "gpu_count": 8,
        "gpu_vram_mb": 549621596160,
        "optimum_benchmark_version": "0.5.0.dev0",
        "optimum_benchmark_commit": null,
        "transformers_version": "4.47.0",
        "transformers_commit": null,
        "accelerate_version": "1.2.0",
        "accelerate_commit": null,
        "diffusers_version": "0.31.0",
        "diffusers_commit": null,
        "optimum_version": null,
        "optimum_commit": null,
        "timm_version": "1.0.12",
        "timm_commit": null,
        "peft_version": "0.14.0",
        "peft_commit": null
    },
    "print_report": true,
    "log_report": true
}