meg-huggingface committed on
Commit
00cb3ba
1 Parent(s): b25c00a

Adding a10 results from energy score

This view is limited to 50 files because it contains too many changes.
Files changed (50)
  1. runs/text_generation/a10g-large/EleutherAI/pythia-1.4b/2024-10-25-13-06-57/.hydra/config.yaml +96 -0
  2. runs/text_generation/a10g-large/EleutherAI/pythia-1.4b/2024-10-25-13-06-57/.hydra/hydra.yaml +175 -0
  3. runs/text_generation/a10g-large/EleutherAI/pythia-1.4b/2024-10-25-13-06-57/.hydra/overrides.yaml +2 -0
  4. runs/text_generation/a10g-large/EleutherAI/pythia-1.4b/2024-10-25-13-06-57/benchmark_report.json +203 -0
  5. runs/text_generation/a10g-large/EleutherAI/pythia-1.4b/2024-10-25-13-06-57/cli.log +188 -0
  6. runs/text_generation/a10g-large/EleutherAI/pythia-1.4b/2024-10-25-13-06-57/error.log +0 -0
  7. runs/text_generation/a10g-large/EleutherAI/pythia-1.4b/2024-10-25-13-06-57/experiment_config.json +110 -0
  8. runs/text_generation/a10g-large/EleutherAI/pythia-1.4b/2024-10-25-13-06-57/generate_codecarbon.json +33 -0
  9. runs/text_generation/a10g-large/EleutherAI/pythia-1.4b/2024-10-25-13-06-57/prefill_codecarbon.json +33 -0
  10. runs/text_generation/a10g-large/EleutherAI/pythia-1.4b/2024-10-25-13-06-57/preprocess_codecarbon.json +33 -0
  11. runs/text_generation/a10g-large/EleutherAI/pythia-1.4b/2024-10-25-14-19-11/.hydra/config.yaml +96 -0
  12. runs/text_generation/a10g-large/EleutherAI/pythia-1.4b/2024-10-25-14-19-11/.hydra/hydra.yaml +175 -0
  13. runs/text_generation/a10g-large/EleutherAI/pythia-1.4b/2024-10-25-14-19-11/.hydra/overrides.yaml +2 -0
  14. runs/text_generation/a10g-large/EleutherAI/pythia-1.4b/2024-10-25-14-19-11/benchmark_report.json +203 -0
  15. runs/text_generation/a10g-large/EleutherAI/pythia-1.4b/2024-10-25-14-19-11/cli.log +188 -0
  16. runs/text_generation/a10g-large/EleutherAI/pythia-1.4b/2024-10-25-14-19-11/error.log +0 -0
  17. runs/text_generation/a10g-large/EleutherAI/pythia-1.4b/2024-10-25-14-19-11/experiment_config.json +110 -0
  18. runs/text_generation/a10g-large/EleutherAI/pythia-1.4b/2024-10-25-14-19-11/generate_codecarbon.json +33 -0
  19. runs/text_generation/a10g-large/EleutherAI/pythia-1.4b/2024-10-25-14-19-11/prefill_codecarbon.json +33 -0
  20. runs/text_generation/a10g-large/EleutherAI/pythia-1.4b/2024-10-25-14-19-11/preprocess_codecarbon.json +33 -0
  21. runs/text_generation/a10g-large/HuggingFaceTB/.DS_Store +0 -0
  22. runs/text_generation/a10g-large/HuggingFaceTB/SmolLM-1.7B/2024-10-24-15-16-02/.hydra/config.yaml +96 -0
  23. runs/text_generation/a10g-large/HuggingFaceTB/SmolLM-1.7B/2024-10-24-15-16-02/.hydra/hydra.yaml +175 -0
  24. runs/text_generation/a10g-large/HuggingFaceTB/SmolLM-1.7B/2024-10-24-15-16-02/.hydra/overrides.yaml +2 -0
  25. runs/text_generation/a10g-large/HuggingFaceTB/SmolLM-1.7B/2024-10-24-15-16-02/benchmark_report.json +203 -0
  26. runs/text_generation/a10g-large/HuggingFaceTB/SmolLM-1.7B/2024-10-24-15-16-02/cli.log +188 -0
  27. runs/text_generation/a10g-large/HuggingFaceTB/SmolLM-1.7B/2024-10-24-15-16-02/error.log +0 -0
  28. runs/text_generation/a10g-large/HuggingFaceTB/SmolLM-1.7B/2024-10-24-15-16-02/experiment_config.json +110 -0
  29. runs/text_generation/a10g-large/HuggingFaceTB/SmolLM-1.7B/2024-10-24-15-16-02/generate_codecarbon.json +33 -0
  30. runs/text_generation/a10g-large/HuggingFaceTB/SmolLM-1.7B/2024-10-24-15-16-02/prefill_codecarbon.json +33 -0
  31. runs/text_generation/a10g-large/HuggingFaceTB/SmolLM-1.7B/2024-10-24-15-16-02/preprocess_codecarbon.json +33 -0
  32. runs/text_generation/a10g-large/HuggingFaceTB/SmolLM-135M/2024-10-23-19-09-15/.hydra/config.yaml +96 -0
  33. runs/text_generation/a10g-large/HuggingFaceTB/SmolLM-135M/2024-10-23-19-09-15/.hydra/hydra.yaml +175 -0
  34. runs/text_generation/a10g-large/HuggingFaceTB/SmolLM-135M/2024-10-23-19-09-15/.hydra/overrides.yaml +2 -0
  35. runs/text_generation/a10g-large/HuggingFaceTB/SmolLM-135M/2024-10-23-19-09-15/benchmark_report.json +203 -0
  36. runs/text_generation/a10g-large/HuggingFaceTB/SmolLM-135M/2024-10-23-19-09-15/cli.log +188 -0
  37. runs/text_generation/a10g-large/HuggingFaceTB/SmolLM-135M/2024-10-23-19-09-15/error.log +0 -0
  38. runs/text_generation/a10g-large/HuggingFaceTB/SmolLM-135M/2024-10-23-19-09-15/experiment_config.json +110 -0
  39. runs/text_generation/a10g-large/HuggingFaceTB/SmolLM-135M/2024-10-23-19-09-15/generate_codecarbon.json +33 -0
  40. runs/text_generation/a10g-large/HuggingFaceTB/SmolLM-135M/2024-10-23-19-09-15/prefill_codecarbon.json +33 -0
  41. runs/text_generation/a10g-large/HuggingFaceTB/SmolLM-135M/2024-10-23-19-09-15/preprocess_codecarbon.json +33 -0
  42. runs/text_generation/a10g-large/HuggingFaceTB/SmolLM-360M/2024-10-24-14-22-22/.hydra/config.yaml +96 -0
  43. runs/text_generation/a10g-large/HuggingFaceTB/SmolLM-360M/2024-10-24-14-22-22/.hydra/hydra.yaml +175 -0
  44. runs/text_generation/a10g-large/HuggingFaceTB/SmolLM-360M/2024-10-24-14-22-22/.hydra/overrides.yaml +2 -0
  45. runs/text_generation/a10g-large/HuggingFaceTB/SmolLM-360M/2024-10-24-14-22-22/benchmark_report.json +203 -0
  46. runs/text_generation/a10g-large/HuggingFaceTB/SmolLM-360M/2024-10-24-14-22-22/cli.log +188 -0
  47. runs/text_generation/a10g-large/HuggingFaceTB/SmolLM-360M/2024-10-24-14-22-22/error.log +0 -0
  48. runs/text_generation/a10g-large/HuggingFaceTB/SmolLM-360M/2024-10-24-14-22-22/experiment_config.json +110 -0
  49. runs/text_generation/a10g-large/HuggingFaceTB/SmolLM-360M/2024-10-24-14-22-22/generate_codecarbon.json +33 -0
  50. runs/text_generation/a10g-large/HuggingFaceTB/SmolLM-360M/2024-10-24-14-22-22/prefill_codecarbon.json +33 -0
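
The benchmark_report.json files listed above share the structure shown in the diffs below: per-phase energy in kWh plus an efficiency value. A minimal sketch of reading one such report, assuming only the Python standard library and the first run path from the listing (the other reports have the same layout):

import json
from pathlib import Path

# One of the run directories from the file list above; any other
# benchmark_report.json in this commit can be read the same way.
report_path = Path(
    "runs/text_generation/a10g-large/EleutherAI/pythia-1.4b/"
    "2024-10-25-13-06-57/benchmark_report.json"
)
report = json.loads(report_path.read_text())

# Each of these phases carries an "energy" block (kWh) and an "efficiency" block.
for phase in ("preprocess", "prefill", "decode"):
    energy = report[phase]["energy"]
    efficiency = report[phase]["efficiency"]
    print(
        f"{phase}: total={energy['total']:.6f} {energy['unit']} "
        f"(cpu={energy['cpu']:.6f}, gpu={energy['gpu']:.6f}, ram={energy['ram']:.6f}); "
        f"efficiency={efficiency['value']:.0f} {efficiency['unit']}"
    )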
runs/text_generation/a10g-large/EleutherAI/pythia-1.4b/2024-10-25-13-06-57/.hydra/config.yaml ADDED
@@ -0,0 +1,96 @@
+ backend:
+ name: pytorch
+ version: 2.4.0
+ _target_: optimum_benchmark.backends.pytorch.backend.PyTorchBackend
+ task: text-generation
+ model: EleutherAI/pythia-1.4b
+ processor: EleutherAI/pythia-1.4b
+ library: null
+ device: cuda
+ device_ids: '0'
+ seed: 42
+ inter_op_num_threads: null
+ intra_op_num_threads: null
+ hub_kwargs: {}
+ no_weights: true
+ device_map: null
+ torch_dtype: null
+ amp_autocast: false
+ amp_dtype: null
+ eval_mode: true
+ to_bettertransformer: false
+ low_cpu_mem_usage: null
+ attn_implementation: null
+ cache_implementation: null
+ torch_compile: false
+ torch_compile_config: {}
+ quantization_scheme: null
+ quantization_config: {}
+ deepspeed_inference: false
+ deepspeed_inference_config: {}
+ peft_type: null
+ peft_config: {}
+ launcher:
+ name: process
+ _target_: optimum_benchmark.launchers.process.launcher.ProcessLauncher
+ device_isolation: false
+ device_isolation_action: warn
+ start_method: spawn
+ benchmark:
+ name: energy_star
+ _target_: optimum_benchmark.benchmarks.energy_star.benchmark.EnergyStarBenchmark
+ dataset_name: EnergyStarAI/text_generation
+ dataset_config: ''
+ dataset_split: train
+ num_samples: 1000
+ input_shapes:
+ batch_size: 1
+ text_column_name: text
+ truncation: true
+ max_length: -1
+ dataset_prefix1: ''
+ dataset_prefix2: ''
+ t5_task: ''
+ image_column_name: image
+ resize: false
+ question_column_name: question
+ context_column_name: context
+ sentence1_column_name: sentence1
+ sentence2_column_name: sentence2
+ audio_column_name: audio
+ iterations: 10
+ warmup_runs: 10
+ energy: true
+ forward_kwargs: {}
+ generate_kwargs:
+ max_new_tokens: 10
+ min_new_tokens: 10
+ call_kwargs: {}
+ experiment_name: text_generation
+ environment:
+ cpu: ' AMD EPYC 7R32'
+ cpu_count: 48
+ cpu_ram_mb: 200472.73984
+ system: Linux
+ machine: x86_64
+ platform: Linux-5.10.192-183.736.amzn2.x86_64-x86_64-with-glibc2.35
+ processor: x86_64
+ python_version: 3.9.20
+ gpu:
+ - NVIDIA A10G
+ gpu_count: 1
+ gpu_vram_mb: 24146608128
+ optimum_benchmark_version: 0.2.0
+ optimum_benchmark_commit: null
+ transformers_version: 4.44.0
+ transformers_commit: null
+ accelerate_version: 0.33.0
+ accelerate_commit: null
+ diffusers_version: 0.30.0
+ diffusers_commit: null
+ optimum_version: null
+ optimum_commit: null
+ timm_version: null
+ timm_commit: null
+ peft_version: null
+ peft_commit: null
runs/text_generation/a10g-large/EleutherAI/pythia-1.4b/2024-10-25-13-06-57/.hydra/hydra.yaml ADDED
@@ -0,0 +1,175 @@
+ hydra:
+ run:
+ dir: /runs/text_generation/EleutherAI/pythia-1.4b/2024-10-25-13-06-57
+ sweep:
+ dir: sweeps/${experiment_name}/${backend.model}/${now:%Y-%m-%d-%H-%M-%S}
+ subdir: ${hydra.job.num}
+ launcher:
+ _target_: hydra._internal.core_plugins.basic_launcher.BasicLauncher
+ sweeper:
+ _target_: hydra._internal.core_plugins.basic_sweeper.BasicSweeper
+ max_batch_size: null
+ params: null
+ help:
+ app_name: ${hydra.job.name}
+ header: '${hydra.help.app_name} is powered by Hydra.
+
+ '
+ footer: 'Powered by Hydra (https://hydra.cc)
+
+ Use --hydra-help to view Hydra specific help
+
+ '
+ template: '${hydra.help.header}
+
+ == Configuration groups ==
+
+ Compose your configuration from those groups (group=option)
+
+
+ $APP_CONFIG_GROUPS
+
+
+ == Config ==
+
+ Override anything in the config (foo.bar=value)
+
+
+ $CONFIG
+
+
+ ${hydra.help.footer}
+
+ '
+ hydra_help:
+ template: 'Hydra (${hydra.runtime.version})
+
+ See https://hydra.cc for more info.
+
+
+ == Flags ==
+
+ $FLAGS_HELP
+
+
+ == Configuration groups ==
+
+ Compose your configuration from those groups (For example, append hydra/job_logging=disabled
+ to command line)
+
+
+ $HYDRA_CONFIG_GROUPS
+
+
+ Use ''--cfg hydra'' to Show the Hydra config.
+
+ '
+ hydra_help: ???
+ hydra_logging:
+ version: 1
+ formatters:
+ colorlog:
+ (): colorlog.ColoredFormatter
+ format: '[%(cyan)s%(asctime)s%(reset)s][%(purple)sHYDRA%(reset)s] %(message)s'
+ handlers:
+ console:
+ class: logging.StreamHandler
+ formatter: colorlog
+ stream: ext://sys.stdout
+ root:
+ level: INFO
+ handlers:
+ - console
+ disable_existing_loggers: false
+ job_logging:
+ version: 1
+ formatters:
+ simple:
+ format: '[%(asctime)s][%(name)s][%(levelname)s] - %(message)s'
+ colorlog:
+ (): colorlog.ColoredFormatter
+ format: '[%(cyan)s%(asctime)s%(reset)s][%(blue)s%(name)s%(reset)s][%(log_color)s%(levelname)s%(reset)s]
+ - %(message)s'
+ log_colors:
+ DEBUG: purple
+ INFO: green
+ WARNING: yellow
+ ERROR: red
+ CRITICAL: red
+ handlers:
+ console:
+ class: logging.StreamHandler
+ formatter: colorlog
+ stream: ext://sys.stdout
+ file:
+ class: logging.FileHandler
+ formatter: simple
+ filename: ${hydra.job.name}.log
+ root:
+ level: INFO
+ handlers:
+ - console
+ - file
+ disable_existing_loggers: false
+ env: {}
+ mode: RUN
+ searchpath: []
+ callbacks: {}
+ output_subdir: .hydra
+ overrides:
+ hydra:
+ - hydra.run.dir=/runs/text_generation/EleutherAI/pythia-1.4b/2024-10-25-13-06-57
+ - hydra.mode=RUN
+ task:
+ - backend.model=EleutherAI/pythia-1.4b
+ - backend.processor=EleutherAI/pythia-1.4b
+ job:
+ name: cli
+ chdir: true
+ override_dirname: backend.model=EleutherAI/pythia-1.4b,backend.processor=EleutherAI/pythia-1.4b
+ id: ???
+ num: ???
+ config_name: text_generation
+ env_set:
+ OVERRIDE_BENCHMARKS: '1'
+ env_copy: []
+ config:
+ override_dirname:
+ kv_sep: '='
+ item_sep: ','
+ exclude_keys: []
+ runtime:
+ version: 1.3.2
+ version_base: '1.3'
+ cwd: /
+ config_sources:
+ - path: hydra.conf
+ schema: pkg
+ provider: hydra
+ - path: optimum_benchmark
+ schema: pkg
+ provider: main
+ - path: hydra_plugins.hydra_colorlog.conf
+ schema: pkg
+ provider: hydra-colorlog
+ - path: /optimum-benchmark/examples/energy_star
+ schema: file
+ provider: command-line
+ - path: ''
+ schema: structured
+ provider: schema
+ output_dir: /runs/text_generation/EleutherAI/pythia-1.4b/2024-10-25-13-06-57
+ choices:
+ benchmark: energy_star
+ launcher: process
+ backend: pytorch
+ hydra/env: default
+ hydra/callbacks: null
+ hydra/job_logging: colorlog
+ hydra/hydra_logging: colorlog
+ hydra/hydra_help: default
+ hydra/help: default
+ hydra/sweeper: basic
+ hydra/launcher: basic
+ hydra/output: default
+ verbose: false
runs/text_generation/a10g-large/EleutherAI/pythia-1.4b/2024-10-25-13-06-57/.hydra/overrides.yaml ADDED
@@ -0,0 +1,2 @@
+ - backend.model=EleutherAI/pythia-1.4b
+ - backend.processor=EleutherAI/pythia-1.4b
runs/text_generation/a10g-large/EleutherAI/pythia-1.4b/2024-10-25-13-06-57/benchmark_report.json ADDED
@@ -0,0 +1,203 @@
+ {
+ "prefill": {
+ "memory": null,
+ "latency": null,
+ "throughput": null,
+ "energy": {
+ "unit": "kWh",
+ "cpu": 0.0006784651691832389,
+ "ram": 6.872767178977215e-06,
+ "gpu": 0.004670849625565543,
+ "total": 0.00535618756192776
+ },
+ "efficiency": {
+ "unit": "tokens/kWh",
+ "value": 56280142.64151448
+ },
+ "measures": [
+ {
+ "unit": "kWh",
+ "cpu": 0.0007471547884041582,
+ "ram": 7.565518406011505e-06,
+ "gpu": 0.005128198824777996,
+ "total": 0.005882919131588165
+ },
+ {
+ "unit": "kWh",
+ "cpu": 0.0007498932815120901,
+ "ram": 7.596245961500314e-06,
+ "gpu": 0.005162669130131636,
+ "total": 0.005920158657605227
+ },
+ {
+ "unit": "kWh",
+ "cpu": 0.0007506841969611641,
+ "ram": 7.6041352101112216e-06,
+ "gpu": 0.005159375238607655,
+ "total": 0.005917663570778931
+ },
+ {
+ "unit": "kWh",
+ "cpu": 0.0007552490596986899,
+ "ram": 7.65116218203134e-06,
+ "gpu": 0.00519234248720446,
+ "total": 0.005955242709085182
+ },
+ {
+ "unit": "kWh",
+ "cpu": 0.0007571123219730605,
+ "ram": 7.669590319577743e-06,
+ "gpu": 0.00525252309090396,
+ "total": 0.006017305003196596
+ },
+ {
+ "unit": "kWh",
+ "cpu": 0.0007562589633509356,
+ "ram": 7.661224940613736e-06,
+ "gpu": 0.005222808067132156,
+ "total": 0.005986728255423707
+ },
+ {
+ "unit": "kWh",
+ "cpu": 0.0,
+ "ram": 0.0,
+ "gpu": 0.0,
+ "total": 0.0
+ },
+ {
+ "unit": "kWh",
+ "cpu": 0.0007560068380162398,
+ "ram": 7.658837285322218e-06,
+ "gpu": 0.0052021186061357305,
+ "total": 0.005965784281437292
+ },
+ {
+ "unit": "kWh",
+ "cpu": 0.0007560777030474536,
+ "ram": 7.659780890775818e-06,
+ "gpu": 0.005200013604452103,
+ "total": 0.005963751088390333
+ },
+ {
+ "unit": "kWh",
+ "cpu": 0.000756214538868598,
+ "ram": 7.661176593828245e-06,
+ "gpu": 0.005188447206309732,
+ "total": 0.0059523229217721615
+ }
+ ]
+ },
+ "decode": {
+ "memory": null,
+ "latency": null,
+ "throughput": null,
+ "energy": {
+ "unit": "kWh",
+ "cpu": 0.0015004224549616597,
+ "ram": 1.5204797701429561e-05,
+ "gpu": 0.00586308749602189,
+ "total": 0.00737871474868498
+ },
+ "efficiency": {
+ "unit": "tokens/kWh",
+ "value": 1219724.6141821598
+ },
+ "measures": [
+ {
+ "unit": "kWh",
+ "cpu": 0.0016782613314461846,
+ "ram": 1.7009955692935013e-05,
+ "gpu": 0.006698563692180137,
+ "total": 0.008393834979319253
+ },
+ {
+ "unit": "kWh",
+ "cpu": 0.0016719845362669326,
+ "ram": 1.6942875567914424e-05,
+ "gpu": 0.0065212854948022425,
+ "total": 0.008210212906637095
+ },
+ {
+ "unit": "kWh",
+ "cpu": 0.0016724901459935718,
+ "ram": 1.6948727439907015e-05,
+ "gpu": 0.006527124666140693,
+ "total": 0.00821656353957417
+ },
+ {
+ "unit": "kWh",
+ "cpu": 0.0016649095729317186,
+ "ram": 1.6871157784986704e-05,
+ "gpu": 0.0064939879729633,
+ "total": 0.008175768703680013
+ },
+ {
+ "unit": "kWh",
+ "cpu": -0.0007571123219730605,
+ "ram": -7.669590319577743e-06,
+ "gpu": -0.00525252309090396,
+ "total": -0.006017305003196596
+ },
+ {
+ "unit": "kWh",
+ "cpu": 0.0016639060387660326,
+ "ram": 1.6861220430135516e-05,
+ "gpu": 0.00647061517648817,
+ "total": 0.008151382435684335
+ },
+ {
+ "unit": "kWh",
+ "cpu": 0.002422301665296781,
+ "ram": 2.4544091842681722e-05,
+ "gpu": 0.011690915463836049,
+ "total": 0.014137761220975514
+ },
+ {
+ "unit": "kWh",
+ "cpu": 0.0016633325706456384,
+ "ram": 1.6854834230008524e-05,
+ "gpu": 0.006496602697278142,
+ "total": 0.008176790102153775
+ },
+ {
+ "unit": "kWh",
+ "cpu": 0.0016610744073210513,
+ "ram": 1.6832224697802917e-05,
+ "gpu": 0.0064857387996974936,
+ "total": 0.008163645431716333
+ },
+ {
+ "unit": "kWh",
+ "cpu": 0.0016630766029217424,
+ "ram": 1.685247964750152e-05,
+ "gpu": 0.006498564087736636,
+ "total": 0.008178493170305896
+ }
+ ]
+ },
+ "per_token": {
+ "memory": null,
+ "latency": null,
+ "throughput": null,
+ "energy": null,
+ "efficiency": null,
+ "measures": null
+ },
+ "preprocess": {
+ "memory": null,
+ "latency": null,
+ "throughput": null,
+ "energy": {
+ "unit": "kWh",
+ "cpu": 1.1119852669475626e-05,
+ "ram": 8.376988347301967e-08,
+ "gpu": 1.861168155592452e-05,
+ "total": 2.9815304108873166e-05
+ },
+ "efficiency": {
+ "unit": "samples/kWh",
+ "value": 33539822.2452608
+ },
+ "measures": null
+ }
+ }
runs/text_generation/a10g-large/EleutherAI/pythia-1.4b/2024-10-25-13-06-57/cli.log ADDED
@@ -0,0 +1,188 @@
+ [2024-10-25 13:07:00,323][launcher][INFO] - Allocating process launcher
+ [2024-10-25 13:07:00,323][process][INFO] - + Setting multiprocessing start method to spawn.
+ [2024-10-25 13:07:00,335][process][INFO] - + Launched benchmark in isolated process 351.
+ [PROC-0][2024-10-25 13:07:02,873][datasets][INFO] - PyTorch version 2.4.0 available.
+ [PROC-0][2024-10-25 13:07:03,794][backend][INFO] - Allocating pytorch backend
+ [PROC-0][2024-10-25 13:07:03,794][backend][INFO] - + Setting random seed to 42
+ [PROC-0][2024-10-25 13:07:04,645][pytorch][INFO] - + Using AutoModel class AutoModelForCausalLM
+ [PROC-0][2024-10-25 13:07:04,645][pytorch][INFO] - + Creating backend temporary directory
+ [PROC-0][2024-10-25 13:07:04,645][pytorch][INFO] - + Loading model with random weights
+ [PROC-0][2024-10-25 13:07:04,646][pytorch][INFO] - + Creating no weights model
+ [PROC-0][2024-10-25 13:07:04,646][pytorch][INFO] - + Creating no weights model directory
+ [PROC-0][2024-10-25 13:07:04,646][pytorch][INFO] - + Creating no weights model state dict
+ [PROC-0][2024-10-25 13:07:04,666][pytorch][INFO] - + Saving no weights model safetensors
+ [PROC-0][2024-10-25 13:07:04,666][pytorch][INFO] - + Saving no weights model pretrained config
+ [PROC-0][2024-10-25 13:07:04,667][pytorch][INFO] - + Loading no weights AutoModel
+ [PROC-0][2024-10-25 13:07:04,667][pytorch][INFO] - + Loading model directly on device: cuda
+ [PROC-0][2024-10-25 13:07:05,038][pytorch][INFO] - + Turning on model's eval mode
+ [PROC-0][2024-10-25 13:07:05,045][benchmark][INFO] - Allocating energy_star benchmark
+ [PROC-0][2024-10-25 13:07:05,045][energy_star][INFO] - + Loading raw dataset
+ [PROC-0][2024-10-25 13:07:06,316][energy_star][INFO] - + Updating Text Generation kwargs with default values
+ [PROC-0][2024-10-25 13:07:06,317][energy_star][INFO] - + Initializing Text Generation report
+ [PROC-0][2024-10-25 13:07:06,317][energy][INFO] - + Tracking GPU energy on devices [0]
+ [PROC-0][2024-10-25 13:07:10,519][energy_star][INFO] - + Preprocessing dataset
+ [PROC-0][2024-10-25 13:07:11,462][energy][INFO] - + Saving codecarbon emission data to preprocess_codecarbon.json
+ [PROC-0][2024-10-25 13:07:11,463][energy_star][INFO] - + Preparing backend for Inference
+ [PROC-0][2024-10-25 13:07:11,463][energy_star][INFO] - + Initialising dataloader
+ [PROC-0][2024-10-25 13:07:11,463][energy_star][INFO] - + Warming up backend for Inference
+ [PROC-0][2024-10-25 13:07:12,868][energy_star][INFO] - + Additional warmup for Text Generation
+ [PROC-0][2024-10-25 13:07:13,067][energy_star][INFO] - + Running Text Generation energy tracking for 10 iterations
+ [PROC-0][2024-10-25 13:07:13,067][energy_star][INFO] - + Prefill iteration 1/10
+ [PROC-0][2024-10-25 13:08:16,356][energy][INFO] - + Saving codecarbon emission data to prefill_codecarbon.json
+ [PROC-0][2024-10-25 13:08:16,357][energy_star][INFO] - + Prefill iteration 2/10
+ [PROC-0][2024-10-25 13:09:19,878][energy][INFO] - + Saving codecarbon emission data to prefill_codecarbon.json
+ [PROC-0][2024-10-25 13:09:19,878][energy_star][INFO] - + Prefill iteration 3/10
+ [PROC-0][2024-10-25 13:10:23,466][energy][INFO] - + Saving codecarbon emission data to prefill_codecarbon.json
+ [PROC-0][2024-10-25 13:10:23,467][energy_star][INFO] - + Prefill iteration 4/10
+ [PROC-0][2024-10-25 13:11:27,441][energy][INFO] - + Saving codecarbon emission data to prefill_codecarbon.json
+ [PROC-0][2024-10-25 13:11:27,442][energy_star][INFO] - + Prefill iteration 5/10
+ [PROC-0][2024-10-25 13:12:31,574][energy][INFO] - + Saving codecarbon emission data to prefill_codecarbon.json
+ [PROC-0][2024-10-25 13:12:31,574][energy_star][INFO] - + Prefill iteration 6/10
+ [PROC-0][2024-10-25 13:13:35,635][energy][INFO] - + Saving codecarbon emission data to prefill_codecarbon.json
+ [PROC-0][2024-10-25 13:13:35,635][energy_star][INFO] - + Prefill iteration 7/10
+ [PROC-0][2024-10-25 13:14:39,660][energy][INFO] - + Saving codecarbon emission data to prefill_codecarbon.json
+ [PROC-0][2024-10-25 13:14:39,660][energy_star][INFO] - + Prefill iteration 8/10
+ [PROC-0][2024-10-25 13:15:43,699][energy][INFO] - + Saving codecarbon emission data to prefill_codecarbon.json
+ [PROC-0][2024-10-25 13:15:43,700][energy_star][INFO] - + Prefill iteration 9/10
+ [PROC-0][2024-10-25 13:16:47,745][energy][INFO] - + Saving codecarbon emission data to prefill_codecarbon.json
+ [PROC-0][2024-10-25 13:16:47,745][energy_star][INFO] - + Prefill iteration 10/10
+ [PROC-0][2024-10-25 13:17:51,801][energy][INFO] - + Saving codecarbon emission data to prefill_codecarbon.json
+ [PROC-0][2024-10-25 13:17:51,802][energy_star][INFO] - + Decoding iteration 1/10
+ [PROC-0][2024-10-25 13:21:17,249][energy][INFO] - + Saving codecarbon emission data to generate_codecarbon.json
+ [PROC-0][2024-10-25 13:21:17,250][energy_star][INFO] - + Decoding iteration 2/10
+ [PROC-0][2024-10-25 13:24:42,398][energy][INFO] - + Saving codecarbon emission data to generate_codecarbon.json
+ [PROC-0][2024-10-25 13:24:42,398][energy_star][INFO] - + Decoding iteration 3/10
+ [PROC-0][2024-10-25 13:28:07,656][energy][INFO] - + Saving codecarbon emission data to generate_codecarbon.json
+ [PROC-0][2024-10-25 13:28:07,656][energy_star][INFO] - + Decoding iteration 4/10
+ [PROC-0][2024-10-25 13:31:32,659][energy][INFO] - + Saving codecarbon emission data to generate_codecarbon.json
+ [PROC-0][2024-10-25 13:31:32,659][energy_star][INFO] - + Decoding iteration 5/10
+ [PROC-0][2024-10-25 13:34:57,839][energy][INFO] - + Saving codecarbon emission data to generate_codecarbon.json
+ [PROC-0][2024-10-25 13:34:57,840][energy_star][INFO] - + Decoding iteration 6/10
+ [PROC-0][2024-10-25 13:38:22,843][energy][INFO] - + Saving codecarbon emission data to generate_codecarbon.json
+ [PROC-0][2024-10-25 13:38:22,843][energy_star][INFO] - + Decoding iteration 7/10
+ [PROC-0][2024-10-25 13:41:48,027][energy][INFO] - + Saving codecarbon emission data to generate_codecarbon.json
+ [PROC-0][2024-10-25 13:41:48,027][energy_star][INFO] - + Decoding iteration 8/10
+ [PROC-0][2024-10-25 13:45:12,960][energy][INFO] - + Saving codecarbon emission data to generate_codecarbon.json
+ [PROC-0][2024-10-25 13:45:12,960][energy_star][INFO] - + Decoding iteration 9/10
+ [PROC-0][2024-10-25 13:48:37,708][energy][INFO] - + Saving codecarbon emission data to generate_codecarbon.json
+ [PROC-0][2024-10-25 13:48:37,708][energy_star][INFO] - + Decoding iteration 10/10
+ [PROC-0][2024-10-25 13:52:02,637][energy][INFO] - + Saving codecarbon emission data to generate_codecarbon.json
+ [PROC-0][2024-10-25 13:52:02,638][energy][INFO] - + prefill energy consumption:
+ [PROC-0][2024-10-25 13:52:02,638][energy][INFO] - + CPU: 0.000678 (kWh)
+ [PROC-0][2024-10-25 13:52:02,638][energy][INFO] - + GPU: 0.004671 (kWh)
+ [PROC-0][2024-10-25 13:52:02,638][energy][INFO] - + RAM: 0.000007 (kWh)
+ [PROC-0][2024-10-25 13:52:02,638][energy][INFO] - + total: 0.005356 (kWh)
+ [PROC-0][2024-10-25 13:52:02,639][energy][INFO] - + prefill_iteration_1 energy consumption:
+ [PROC-0][2024-10-25 13:52:02,639][energy][INFO] - + CPU: 0.000747 (kWh)
+ [PROC-0][2024-10-25 13:52:02,639][energy][INFO] - + GPU: 0.005128 (kWh)
+ [PROC-0][2024-10-25 13:52:02,639][energy][INFO] - + RAM: 0.000008 (kWh)
+ [PROC-0][2024-10-25 13:52:02,639][energy][INFO] - + total: 0.005883 (kWh)
+ [PROC-0][2024-10-25 13:52:02,639][energy][INFO] - + prefill_iteration_2 energy consumption:
+ [PROC-0][2024-10-25 13:52:02,639][energy][INFO] - + CPU: 0.000750 (kWh)
+ [PROC-0][2024-10-25 13:52:02,639][energy][INFO] - + GPU: 0.005163 (kWh)
+ [PROC-0][2024-10-25 13:52:02,639][energy][INFO] - + RAM: 0.000008 (kWh)
+ [PROC-0][2024-10-25 13:52:02,639][energy][INFO] - + total: 0.005920 (kWh)
+ [PROC-0][2024-10-25 13:52:02,639][energy][INFO] - + prefill_iteration_3 energy consumption:
+ [PROC-0][2024-10-25 13:52:02,639][energy][INFO] - + CPU: 0.000751 (kWh)
+ [PROC-0][2024-10-25 13:52:02,640][energy][INFO] - + GPU: 0.005159 (kWh)
+ [PROC-0][2024-10-25 13:52:02,640][energy][INFO] - + RAM: 0.000008 (kWh)
+ [PROC-0][2024-10-25 13:52:02,640][energy][INFO] - + total: 0.005918 (kWh)
+ [PROC-0][2024-10-25 13:52:02,640][energy][INFO] - + prefill_iteration_4 energy consumption:
+ [PROC-0][2024-10-25 13:52:02,640][energy][INFO] - + CPU: 0.000755 (kWh)
+ [PROC-0][2024-10-25 13:52:02,640][energy][INFO] - + GPU: 0.005192 (kWh)
+ [PROC-0][2024-10-25 13:52:02,640][energy][INFO] - + RAM: 0.000008 (kWh)
+ [PROC-0][2024-10-25 13:52:02,640][energy][INFO] - + total: 0.005955 (kWh)
+ [PROC-0][2024-10-25 13:52:02,640][energy][INFO] - + prefill_iteration_5 energy consumption:
+ [PROC-0][2024-10-25 13:52:02,640][energy][INFO] - + CPU: 0.000757 (kWh)
+ [PROC-0][2024-10-25 13:52:02,640][energy][INFO] - + GPU: 0.005253 (kWh)
+ [PROC-0][2024-10-25 13:52:02,640][energy][INFO] - + RAM: 0.000008 (kWh)
+ [PROC-0][2024-10-25 13:52:02,641][energy][INFO] - + total: 0.006017 (kWh)
+ [PROC-0][2024-10-25 13:52:02,641][energy][INFO] - + prefill_iteration_6 energy consumption:
+ [PROC-0][2024-10-25 13:52:02,641][energy][INFO] - + CPU: 0.000756 (kWh)
+ [PROC-0][2024-10-25 13:52:02,641][energy][INFO] - + GPU: 0.005223 (kWh)
+ [PROC-0][2024-10-25 13:52:02,641][energy][INFO] - + RAM: 0.000008 (kWh)
+ [PROC-0][2024-10-25 13:52:02,641][energy][INFO] - + total: 0.005987 (kWh)
+ [PROC-0][2024-10-25 13:52:02,641][energy][INFO] - + prefill_iteration_7 energy consumption:
+ [PROC-0][2024-10-25 13:52:02,641][energy][INFO] - + CPU: 0.000000 (kWh)
+ [PROC-0][2024-10-25 13:52:02,641][energy][INFO] - + GPU: 0.000000 (kWh)
+ [PROC-0][2024-10-25 13:52:02,641][energy][INFO] - + RAM: 0.000000 (kWh)
+ [PROC-0][2024-10-25 13:52:02,641][energy][INFO] - + total: 0.000000 (kWh)
+ [PROC-0][2024-10-25 13:52:02,641][energy][INFO] - + prefill_iteration_8 energy consumption:
+ [PROC-0][2024-10-25 13:52:02,641][energy][INFO] - + CPU: 0.000756 (kWh)
+ [PROC-0][2024-10-25 13:52:02,642][energy][INFO] - + GPU: 0.005202 (kWh)
+ [PROC-0][2024-10-25 13:52:02,642][energy][INFO] - + RAM: 0.000008 (kWh)
+ [PROC-0][2024-10-25 13:52:02,642][energy][INFO] - + total: 0.005966 (kWh)
+ [PROC-0][2024-10-25 13:52:02,642][energy][INFO] - + prefill_iteration_9 energy consumption:
+ [PROC-0][2024-10-25 13:52:02,642][energy][INFO] - + CPU: 0.000756 (kWh)
+ [PROC-0][2024-10-25 13:52:02,642][energy][INFO] - + GPU: 0.005200 (kWh)
+ [PROC-0][2024-10-25 13:52:02,642][energy][INFO] - + RAM: 0.000008 (kWh)
+ [PROC-0][2024-10-25 13:52:02,642][energy][INFO] - + total: 0.005964 (kWh)
+ [PROC-0][2024-10-25 13:52:02,642][energy][INFO] - + prefill_iteration_10 energy consumption:
+ [PROC-0][2024-10-25 13:52:02,642][energy][INFO] - + CPU: 0.000756 (kWh)
+ [PROC-0][2024-10-25 13:52:02,642][energy][INFO] - + GPU: 0.005188 (kWh)
+ [PROC-0][2024-10-25 13:52:02,642][energy][INFO] - + RAM: 0.000008 (kWh)
+ [PROC-0][2024-10-25 13:52:02,643][energy][INFO] - + total: 0.005952 (kWh)
+ [PROC-0][2024-10-25 13:52:02,643][energy][INFO] - + decode energy consumption:
+ [PROC-0][2024-10-25 13:52:02,643][energy][INFO] - + CPU: 0.001500 (kWh)
+ [PROC-0][2024-10-25 13:52:02,643][energy][INFO] - + GPU: 0.005863 (kWh)
+ [PROC-0][2024-10-25 13:52:02,643][energy][INFO] - + RAM: 0.000015 (kWh)
+ [PROC-0][2024-10-25 13:52:02,643][energy][INFO] - + total: 0.007379 (kWh)
+ [PROC-0][2024-10-25 13:52:02,643][energy][INFO] - + decode_iteration_1 energy consumption:
+ [PROC-0][2024-10-25 13:52:02,643][energy][INFO] - + CPU: 0.001678 (kWh)
+ [PROC-0][2024-10-25 13:52:02,643][energy][INFO] - + GPU: 0.006699 (kWh)
+ [PROC-0][2024-10-25 13:52:02,643][energy][INFO] - + RAM: 0.000017 (kWh)
+ [PROC-0][2024-10-25 13:52:02,643][energy][INFO] - + total: 0.008394 (kWh)
+ [PROC-0][2024-10-25 13:52:02,643][energy][INFO] - + decode_iteration_2 energy consumption:
+ [PROC-0][2024-10-25 13:52:02,643][energy][INFO] - + CPU: 0.001672 (kWh)
+ [PROC-0][2024-10-25 13:52:02,644][energy][INFO] - + GPU: 0.006521 (kWh)
+ [PROC-0][2024-10-25 13:52:02,644][energy][INFO] - + RAM: 0.000017 (kWh)
+ [PROC-0][2024-10-25 13:52:02,644][energy][INFO] - + total: 0.008210 (kWh)
+ [PROC-0][2024-10-25 13:52:02,644][energy][INFO] - + decode_iteration_3 energy consumption:
+ [PROC-0][2024-10-25 13:52:02,644][energy][INFO] - + CPU: 0.001672 (kWh)
+ [PROC-0][2024-10-25 13:52:02,644][energy][INFO] - + GPU: 0.006527 (kWh)
+ [PROC-0][2024-10-25 13:52:02,644][energy][INFO] - + RAM: 0.000017 (kWh)
+ [PROC-0][2024-10-25 13:52:02,644][energy][INFO] - + total: 0.008217 (kWh)
+ [PROC-0][2024-10-25 13:52:02,644][energy][INFO] - + decode_iteration_4 energy consumption:
+ [PROC-0][2024-10-25 13:52:02,644][energy][INFO] - + CPU: 0.001665 (kWh)
+ [PROC-0][2024-10-25 13:52:02,644][energy][INFO] - + GPU: 0.006494 (kWh)
+ [PROC-0][2024-10-25 13:52:02,644][energy][INFO] - + RAM: 0.000017 (kWh)
+ [PROC-0][2024-10-25 13:52:02,645][energy][INFO] - + total: 0.008176 (kWh)
+ [PROC-0][2024-10-25 13:52:02,645][energy][INFO] - + decode_iteration_5 energy consumption:
+ [PROC-0][2024-10-25 13:52:02,645][energy][INFO] - + CPU: -0.000757 (kWh)
+ [PROC-0][2024-10-25 13:52:02,645][energy][INFO] - + GPU: -0.005253 (kWh)
+ [PROC-0][2024-10-25 13:52:02,645][energy][INFO] - + RAM: -0.000008 (kWh)
+ [PROC-0][2024-10-25 13:52:02,645][energy][INFO] - + total: -0.006017 (kWh)
+ [PROC-0][2024-10-25 13:52:02,645][energy][INFO] - + decode_iteration_6 energy consumption:
+ [PROC-0][2024-10-25 13:52:02,645][energy][INFO] - + CPU: 0.001664 (kWh)
+ [PROC-0][2024-10-25 13:52:02,645][energy][INFO] - + GPU: 0.006471 (kWh)
+ [PROC-0][2024-10-25 13:52:02,645][energy][INFO] - + RAM: 0.000017 (kWh)
+ [PROC-0][2024-10-25 13:52:02,645][energy][INFO] - + total: 0.008151 (kWh)
+ [PROC-0][2024-10-25 13:52:02,645][energy][INFO] - + decode_iteration_7 energy consumption:
+ [PROC-0][2024-10-25 13:52:02,646][energy][INFO] - + CPU: 0.002422 (kWh)
+ [PROC-0][2024-10-25 13:52:02,646][energy][INFO] - + GPU: 0.011691 (kWh)
+ [PROC-0][2024-10-25 13:52:02,646][energy][INFO] - + RAM: 0.000025 (kWh)
+ [PROC-0][2024-10-25 13:52:02,646][energy][INFO] - + total: 0.014138 (kWh)
+ [PROC-0][2024-10-25 13:52:02,646][energy][INFO] - + decode_iteration_8 energy consumption:
+ [PROC-0][2024-10-25 13:52:02,646][energy][INFO] - + CPU: 0.001663 (kWh)
+ [PROC-0][2024-10-25 13:52:02,646][energy][INFO] - + GPU: 0.006497 (kWh)
+ [PROC-0][2024-10-25 13:52:02,646][energy][INFO] - + RAM: 0.000017 (kWh)
+ [PROC-0][2024-10-25 13:52:02,646][energy][INFO] - + total: 0.008177 (kWh)
+ [PROC-0][2024-10-25 13:52:02,646][energy][INFO] - + decode_iteration_9 energy consumption:
+ [PROC-0][2024-10-25 13:52:02,646][energy][INFO] - + CPU: 0.001661 (kWh)
+ [PROC-0][2024-10-25 13:52:02,646][energy][INFO] - + GPU: 0.006486 (kWh)
+ [PROC-0][2024-10-25 13:52:02,646][energy][INFO] - + RAM: 0.000017 (kWh)
+ [PROC-0][2024-10-25 13:52:02,647][energy][INFO] - + total: 0.008164 (kWh)
+ [PROC-0][2024-10-25 13:52:02,647][energy][INFO] - + decode_iteration_10 energy consumption:
+ [PROC-0][2024-10-25 13:52:02,647][energy][INFO] - + CPU: 0.001663 (kWh)
+ [PROC-0][2024-10-25 13:52:02,647][energy][INFO] - + GPU: 0.006499 (kWh)
+ [PROC-0][2024-10-25 13:52:02,647][energy][INFO] - + RAM: 0.000017 (kWh)
+ [PROC-0][2024-10-25 13:52:02,647][energy][INFO] - + total: 0.008178 (kWh)
+ [PROC-0][2024-10-25 13:52:02,647][energy][INFO] - + preprocess energy consumption:
+ [PROC-0][2024-10-25 13:52:02,647][energy][INFO] - + CPU: 0.000011 (kWh)
+ [PROC-0][2024-10-25 13:52:02,647][energy][INFO] - + GPU: 0.000019 (kWh)
+ [PROC-0][2024-10-25 13:52:02,647][energy][INFO] - + RAM: 0.000000 (kWh)
+ [PROC-0][2024-10-25 13:52:02,647][energy][INFO] - + total: 0.000030 (kWh)
+ [PROC-0][2024-10-25 13:52:02,648][energy][INFO] - + prefill energy efficiency: 56280142.641514 (tokens/kWh)
+ [PROC-0][2024-10-25 13:52:02,648][energy][INFO] - + decode energy efficiency: 1219724.614182 (tokens/kWh)
+ [PROC-0][2024-10-25 13:52:02,648][energy][INFO] - + preprocess energy efficiency: 33539822.245261 (samples/kWh)
+ [2024-10-25 13:52:03,342][datasets][INFO] - PyTorch version 2.4.0 available.
runs/text_generation/a10g-large/EleutherAI/pythia-1.4b/2024-10-25-13-06-57/error.log ADDED
The diff for this file is too large to render. See raw diff
 
runs/text_generation/a10g-large/EleutherAI/pythia-1.4b/2024-10-25-13-06-57/experiment_config.json ADDED
@@ -0,0 +1,110 @@
+ {
+ "experiment_name": "text_generation",
+ "backend": {
+ "name": "pytorch",
+ "version": "2.4.0",
+ "_target_": "optimum_benchmark.backends.pytorch.backend.PyTorchBackend",
+ "task": "text-generation",
+ "model": "EleutherAI/pythia-1.4b",
+ "processor": "EleutherAI/pythia-1.4b",
+ "library": "transformers",
+ "device": "cuda",
+ "device_ids": "0",
+ "seed": 42,
+ "inter_op_num_threads": null,
+ "intra_op_num_threads": null,
+ "hub_kwargs": {
+ "revision": "main",
+ "force_download": false,
+ "local_files_only": false,
+ "trust_remote_code": true
+ },
+ "no_weights": true,
+ "device_map": null,
+ "torch_dtype": null,
+ "amp_autocast": false,
+ "amp_dtype": null,
+ "eval_mode": true,
+ "to_bettertransformer": false,
+ "low_cpu_mem_usage": null,
+ "attn_implementation": null,
+ "cache_implementation": null,
+ "torch_compile": false,
+ "torch_compile_config": {},
+ "quantization_scheme": null,
+ "quantization_config": {},
+ "deepspeed_inference": false,
+ "deepspeed_inference_config": {},
+ "peft_type": null,
+ "peft_config": {}
+ },
+ "launcher": {
+ "name": "process",
+ "_target_": "optimum_benchmark.launchers.process.launcher.ProcessLauncher",
+ "device_isolation": false,
+ "device_isolation_action": "warn",
+ "start_method": "spawn"
+ },
+ "benchmark": {
+ "name": "energy_star",
+ "_target_": "optimum_benchmark.benchmarks.energy_star.benchmark.EnergyStarBenchmark",
+ "dataset_name": "EnergyStarAI/text_generation",
+ "dataset_config": "",
+ "dataset_split": "train",
+ "num_samples": 1000,
+ "input_shapes": {
+ "batch_size": 1
+ },
+ "text_column_name": "text",
+ "truncation": true,
+ "max_length": -1,
+ "dataset_prefix1": "",
+ "dataset_prefix2": "",
+ "t5_task": "",
+ "image_column_name": "image",
+ "resize": false,
+ "question_column_name": "question",
+ "context_column_name": "context",
+ "sentence1_column_name": "sentence1",
+ "sentence2_column_name": "sentence2",
+ "audio_column_name": "audio",
+ "iterations": 10,
+ "warmup_runs": 10,
+ "energy": true,
+ "forward_kwargs": {},
+ "generate_kwargs": {
+ "max_new_tokens": 10,
+ "min_new_tokens": 10
+ },
+ "call_kwargs": {}
+ },
+ "environment": {
+ "cpu": " AMD EPYC 7R32",
+ "cpu_count": 48,
+ "cpu_ram_mb": 200472.73984,
+ "system": "Linux",
+ "machine": "x86_64",
+ "platform": "Linux-5.10.192-183.736.amzn2.x86_64-x86_64-with-glibc2.35",
+ "processor": "x86_64",
+ "python_version": "3.9.20",
+ "gpu": [
+ "NVIDIA A10G"
+ ],
+ "gpu_count": 1,
+ "gpu_vram_mb": 24146608128,
+ "optimum_benchmark_version": "0.2.0",
+ "optimum_benchmark_commit": null,
+ "transformers_version": "4.44.0",
+ "transformers_commit": null,
+ "accelerate_version": "0.33.0",
+ "accelerate_commit": null,
+ "diffusers_version": "0.30.0",
+ "diffusers_commit": null,
+ "optimum_version": null,
+ "optimum_commit": null,
+ "timm_version": null,
+ "timm_commit": null,
+ "peft_version": null,
+ "peft_commit": null
+ }
+ }
runs/text_generation/a10g-large/EleutherAI/pythia-1.4b/2024-10-25-13-06-57/generate_codecarbon.json ADDED
@@ -0,0 +1,33 @@
+ {
+ "timestamp": "2024-10-25T13:52:02",
+ "project_name": "codecarbon",
+ "run_id": "8bd2e04f-cfc4-4707-8676-e5f40e6d32e7",
+ "duration": -1729480256.4145586,
+ "emissions": 0.005216170007237689,
+ "emissions_rate": 2.5454396948743776e-05,
+ "cpu_power": 42.5,
+ "gpu_power": 205.30973435218456,
+ "ram_power": 0.43064117431640625,
+ "cpu_energy": 0.0024192911417903404,
+ "gpu_energy": 0.011687011294046368,
+ "ram_energy": 2.4513656241329765e-05,
+ "energy_consumed": 0.014130816092078058,
+ "country_name": "United States",
+ "country_iso_code": "USA",
+ "region": "virginia",
+ "cloud_provider": "",
+ "cloud_region": "",
+ "os": "Linux-5.10.192-183.736.amzn2.x86_64-x86_64-with-glibc2.35",
+ "python_version": "3.9.20",
+ "codecarbon_version": "2.5.1",
+ "cpu_count": 48,
+ "cpu_model": "AMD EPYC 7R32",
+ "gpu_count": 1,
+ "gpu_model": "1 x NVIDIA A10G",
+ "longitude": -77.4903,
+ "latitude": 39.0469,
+ "ram_total_size": 186.7047882080078,
+ "tracking_mode": "process",
+ "on_cloud": "N",
+ "pue": 1.0
+ }
runs/text_generation/a10g-large/EleutherAI/pythia-1.4b/2024-10-25-13-06-57/prefill_codecarbon.json ADDED
@@ -0,0 +1,33 @@
+ {
+ "timestamp": "2024-10-25T13:17:51",
+ "project_name": "codecarbon",
+ "run_id": "8bd2e04f-cfc4-4707-8676-e5f40e6d32e7",
+ "duration": -1729480397.2874951,
+ "emissions": 0.0021972070187331605,
+ "emissions_rate": 3.430497464397154e-05,
+ "cpu_power": 42.5,
+ "gpu_power": 291.60627364641437,
+ "ram_power": 0.4305853843688965,
+ "cpu_energy": 0.000756214538868598,
+ "gpu_energy": 0.005188447206309732,
+ "ram_energy": 7.661176593828245e-06,
+ "energy_consumed": 0.0059523229217721615,
+ "country_name": "United States",
+ "country_iso_code": "USA",
+ "region": "virginia",
+ "cloud_provider": "",
+ "cloud_region": "",
+ "os": "Linux-5.10.192-183.736.amzn2.x86_64-x86_64-with-glibc2.35",
+ "python_version": "3.9.20",
+ "codecarbon_version": "2.5.1",
+ "cpu_count": 48,
+ "cpu_model": "AMD EPYC 7R32",
+ "gpu_count": 1,
+ "gpu_model": "1 x NVIDIA A10G",
+ "longitude": -77.4903,
+ "latitude": 39.0469,
+ "ram_total_size": 186.7047882080078,
+ "tracking_mode": "process",
+ "on_cloud": "N",
+ "pue": 1.0
+ }
runs/text_generation/a10g-large/EleutherAI/pythia-1.4b/2024-10-25-13-06-57/preprocess_codecarbon.json ADDED
@@ -0,0 +1,33 @@
+ {
+ "timestamp": "2024-10-25T13:07:11",
+ "project_name": "codecarbon",
+ "run_id": "8bd2e04f-cfc4-4707-8676-e5f40e6d32e7",
+ "duration": -1729480460.397016,
+ "emissions": 1.100585373385212e-05,
+ "emissions_rate": 1.1711243963235281e-05,
+ "cpu_power": 42.5,
+ "gpu_power": 71.30390443126397,
+ "ram_power": 0.32120847702026367,
+ "cpu_energy": 1.1119852669475626e-05,
+ "gpu_energy": 1.861168155592452e-05,
+ "ram_energy": 8.376988347301967e-08,
+ "energy_consumed": 2.9815304108873166e-05,
+ "country_name": "United States",
+ "country_iso_code": "USA",
+ "region": "virginia",
+ "cloud_provider": "",
+ "cloud_region": "",
+ "os": "Linux-5.10.192-183.736.amzn2.x86_64-x86_64-with-glibc2.35",
+ "python_version": "3.9.20",
+ "codecarbon_version": "2.5.1",
+ "cpu_count": 48,
+ "cpu_model": "AMD EPYC 7R32",
+ "gpu_count": 1,
+ "gpu_model": "1 x NVIDIA A10G",
+ "longitude": -77.4903,
+ "latitude": 39.0469,
+ "ram_total_size": 186.7047882080078,
+ "tracking_mode": "process",
+ "on_cloud": "N",
+ "pue": 1.0
+ }
runs/text_generation/a10g-large/EleutherAI/pythia-1.4b/2024-10-25-14-19-11/.hydra/config.yaml ADDED
@@ -0,0 +1,96 @@
+ backend:
+ name: pytorch
+ version: 2.4.0
+ _target_: optimum_benchmark.backends.pytorch.backend.PyTorchBackend
+ task: text-generation
+ model: EleutherAI/pythia-1.4b
+ processor: EleutherAI/pythia-1.4b
+ library: null
+ device: cuda
+ device_ids: '0'
+ seed: 42
+ inter_op_num_threads: null
+ intra_op_num_threads: null
+ hub_kwargs: {}
+ no_weights: true
+ device_map: null
+ torch_dtype: null
+ amp_autocast: false
+ amp_dtype: null
+ eval_mode: true
+ to_bettertransformer: false
+ low_cpu_mem_usage: null
+ attn_implementation: null
+ cache_implementation: null
+ torch_compile: false
+ torch_compile_config: {}
+ quantization_scheme: null
+ quantization_config: {}
+ deepspeed_inference: false
+ deepspeed_inference_config: {}
+ peft_type: null
+ peft_config: {}
+ launcher:
+ name: process
+ _target_: optimum_benchmark.launchers.process.launcher.ProcessLauncher
+ device_isolation: false
+ device_isolation_action: warn
+ start_method: spawn
+ benchmark:
+ name: energy_star
+ _target_: optimum_benchmark.benchmarks.energy_star.benchmark.EnergyStarBenchmark
+ dataset_name: EnergyStarAI/text_generation
+ dataset_config: ''
+ dataset_split: train
+ num_samples: 1000
+ input_shapes:
+ batch_size: 1
+ text_column_name: text
+ truncation: true
+ max_length: -1
+ dataset_prefix1: ''
+ dataset_prefix2: ''
+ t5_task: ''
+ image_column_name: image
+ resize: false
+ question_column_name: question
+ context_column_name: context
+ sentence1_column_name: sentence1
+ sentence2_column_name: sentence2
+ audio_column_name: audio
+ iterations: 10
+ warmup_runs: 10
+ energy: true
+ forward_kwargs: {}
+ generate_kwargs:
+ max_new_tokens: 10
+ min_new_tokens: 10
+ call_kwargs: {}
+ experiment_name: text_generation
+ environment:
+ cpu: ' AMD EPYC 7R32'
+ cpu_count: 48
+ cpu_ram_mb: 200472.73984
+ system: Linux
+ machine: x86_64
+ platform: Linux-5.10.192-183.736.amzn2.x86_64-x86_64-with-glibc2.35
+ processor: x86_64
+ python_version: 3.9.20
+ gpu:
+ - NVIDIA A10G
+ gpu_count: 1
+ gpu_vram_mb: 24146608128
+ optimum_benchmark_version: 0.2.0
+ optimum_benchmark_commit: null
+ transformers_version: 4.44.0
+ transformers_commit: null
+ accelerate_version: 0.33.0
+ accelerate_commit: null
+ diffusers_version: 0.30.0
+ diffusers_commit: null
+ optimum_version: null
+ optimum_commit: null
+ timm_version: null
+ timm_commit: null
+ peft_version: null
+ peft_commit: null
runs/text_generation/a10g-large/EleutherAI/pythia-1.4b/2024-10-25-14-19-11/.hydra/hydra.yaml ADDED
@@ -0,0 +1,175 @@
+ hydra:
+ run:
+ dir: /runs/text_generation/EleutherAI/pythia-1.4b/2024-10-25-14-19-11
+ sweep:
+ dir: sweeps/${experiment_name}/${backend.model}/${now:%Y-%m-%d-%H-%M-%S}
+ subdir: ${hydra.job.num}
+ launcher:
+ _target_: hydra._internal.core_plugins.basic_launcher.BasicLauncher
+ sweeper:
+ _target_: hydra._internal.core_plugins.basic_sweeper.BasicSweeper
+ max_batch_size: null
+ params: null
+ help:
+ app_name: ${hydra.job.name}
+ header: '${hydra.help.app_name} is powered by Hydra.
+
+ '
+ footer: 'Powered by Hydra (https://hydra.cc)
+
+ Use --hydra-help to view Hydra specific help
+
+ '
+ template: '${hydra.help.header}
+
+ == Configuration groups ==
+
+ Compose your configuration from those groups (group=option)
+
+
+ $APP_CONFIG_GROUPS
+
+
+ == Config ==
+
+ Override anything in the config (foo.bar=value)
+
+
+ $CONFIG
+
+
+ ${hydra.help.footer}
+
+ '
+ hydra_help:
+ template: 'Hydra (${hydra.runtime.version})
+
+ See https://hydra.cc for more info.
+
+
+ == Flags ==
+
+ $FLAGS_HELP
+
+
+ == Configuration groups ==
+
+ Compose your configuration from those groups (For example, append hydra/job_logging=disabled
+ to command line)
+
+
+ $HYDRA_CONFIG_GROUPS
+
+
+ Use ''--cfg hydra'' to Show the Hydra config.
+
+ '
+ hydra_help: ???
+ hydra_logging:
+ version: 1
+ formatters:
+ colorlog:
+ (): colorlog.ColoredFormatter
+ format: '[%(cyan)s%(asctime)s%(reset)s][%(purple)sHYDRA%(reset)s] %(message)s'
+ handlers:
+ console:
+ class: logging.StreamHandler
+ formatter: colorlog
+ stream: ext://sys.stdout
+ root:
+ level: INFO
+ handlers:
+ - console
+ disable_existing_loggers: false
+ job_logging:
+ version: 1
+ formatters:
+ simple:
+ format: '[%(asctime)s][%(name)s][%(levelname)s] - %(message)s'
+ colorlog:
+ (): colorlog.ColoredFormatter
+ format: '[%(cyan)s%(asctime)s%(reset)s][%(blue)s%(name)s%(reset)s][%(log_color)s%(levelname)s%(reset)s]
+ - %(message)s'
+ log_colors:
+ DEBUG: purple
+ INFO: green
+ WARNING: yellow
+ ERROR: red
+ CRITICAL: red
+ handlers:
+ console:
+ class: logging.StreamHandler
+ formatter: colorlog
+ stream: ext://sys.stdout
+ file:
+ class: logging.FileHandler
+ formatter: simple
+ filename: ${hydra.job.name}.log
+ root:
+ level: INFO
+ handlers:
+ - console
+ - file
+ disable_existing_loggers: false
+ env: {}
+ mode: RUN
+ searchpath: []
+ callbacks: {}
+ output_subdir: .hydra
+ overrides:
+ hydra:
+ - hydra.run.dir=/runs/text_generation/EleutherAI/pythia-1.4b/2024-10-25-14-19-11
+ - hydra.mode=RUN
+ task:
+ - backend.model=EleutherAI/pythia-1.4b
+ - backend.processor=EleutherAI/pythia-1.4b
+ job:
+ name: cli
+ chdir: true
+ override_dirname: backend.model=EleutherAI/pythia-1.4b,backend.processor=EleutherAI/pythia-1.4b
+ id: ???
+ num: ???
+ config_name: text_generation
+ env_set:
+ OVERRIDE_BENCHMARKS: '1'
+ env_copy: []
+ config:
+ override_dirname:
+ kv_sep: '='
+ item_sep: ','
+ exclude_keys: []
+ runtime:
+ version: 1.3.2
+ version_base: '1.3'
+ cwd: /
+ config_sources:
+ - path: hydra.conf
+ schema: pkg
+ provider: hydra
+ - path: optimum_benchmark
+ schema: pkg
+ provider: main
+ - path: hydra_plugins.hydra_colorlog.conf
+ schema: pkg
+ provider: hydra-colorlog
+ - path: /optimum-benchmark/examples/energy_star
+ schema: file
+ provider: command-line
+ - path: ''
+ schema: structured
+ provider: schema
+ output_dir: /runs/text_generation/EleutherAI/pythia-1.4b/2024-10-25-14-19-11
+ choices:
+ benchmark: energy_star
+ launcher: process
+ backend: pytorch
+ hydra/env: default
+ hydra/callbacks: null
+ hydra/job_logging: colorlog
+ hydra/hydra_logging: colorlog
+ hydra/hydra_help: default
+ hydra/help: default
+ hydra/sweeper: basic
+ hydra/launcher: basic
+ hydra/output: default
+ verbose: false
runs/text_generation/a10g-large/EleutherAI/pythia-1.4b/2024-10-25-14-19-11/.hydra/overrides.yaml ADDED
@@ -0,0 +1,2 @@
+ - backend.model=EleutherAI/pythia-1.4b
+ - backend.processor=EleutherAI/pythia-1.4b
runs/text_generation/a10g-large/EleutherAI/pythia-1.4b/2024-10-25-14-19-11/benchmark_report.json ADDED
@@ -0,0 +1,203 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ {
2
+ "prefill": {
3
+ "memory": null,
4
+ "latency": null,
5
+ "throughput": null,
6
+ "energy": {
7
+ "unit": "kWh",
8
+ "cpu": 0.0006950257205501962,
9
+ "ram": 7.229079517594969e-06,
10
+ "gpu": 0.004783902188229749,
11
+ "total": 0.00548615698829754
12
+ },
13
+ "efficiency": {
14
+ "unit": "tokens/kWh",
15
+ "value": 54946841.77704233
16
+ },
17
+ "measures": [
18
+ {
19
+ "unit": "kWh",
20
+ "cpu": 0.0007641022352298832,
21
+ "ram": 7.945458815587156e-06,
22
+ "gpu": 0.005254148925537994,
23
+ "total": 0.006026196619583464
24
+ },
25
+ {
26
+ "unit": "kWh",
27
+ "cpu": 0.0007698822285832043,
28
+ "ram": 8.007436582032034e-06,
29
+ "gpu": 0.005301914241527816,
30
+ "total": 0.006079803906693053
31
+ },
32
+ {
33
+ "unit": "kWh",
34
+ "cpu": 0.0007732566303492706,
35
+ "ram": 8.043132134785983e-06,
36
+ "gpu": 0.005305364522065981,
37
+ "total": 0.006086664284550038
38
+ },
39
+ {
40
+ "unit": "kWh",
41
+ "cpu": 0.0007727046854467418,
42
+ "ram": 8.037193759434171e-06,
43
+ "gpu": 0.005334005933867791,
44
+ "total": 0.006114747813073966
45
+ },
46
+ {
47
+ "unit": "kWh",
48
+ "cpu": 0.0007735792766898762,
49
+ "ram": 8.04650520994879e-06,
50
+ "gpu": 0.005336946213998051,
51
+ "total": 0.006118571995897874
52
+ },
53
+ {
54
+ "unit": "kWh",
55
+ "cpu": 0.000773910086490004,
56
+ "ram": 8.050033200335675e-06,
57
+ "gpu": 0.0053161620307038415,
58
+ "total": 0.006098122150394184
59
+ },
60
+ {
61
+ "unit": "kWh",
62
+ "cpu": 0.0,
63
+ "ram": 0.0,
64
+ "gpu": 0.0,
65
+ "total": 0.0
66
+ },
67
+ {
68
+ "unit": "kWh",
69
+ "cpu": 0.0007740681057641875,
70
+ "ram": 8.051292602441139e-06,
71
+ "gpu": 0.005336773991638122,
72
+ "total": 0.006118893390004751
73
+ },
74
+ {
75
+ "unit": "kWh",
76
+ "cpu": 0.0007736332350321597,
77
+ "ram": 8.047155500604038e-06,
78
+ "gpu": 0.00532560537159199,
79
+ "total": 0.006107285762124749
80
+ },
81
+ {
82
+ "unit": "kWh",
83
+ "cpu": 0.0007751207219166339,
84
+ "ram": 8.062587370780708e-06,
85
+ "gpu": 0.005328100651365908,
86
+ "total": 0.006111283960653323
87
+ }
88
+ ]
89
+ },
90
+ "decode": {
91
+ "memory": null,
92
+ "latency": null,
93
+ "throughput": null,
94
+ "energy": {
95
+ "unit": "kWh",
96
+ "cpu": 0.001532609044249083,
97
+ "ram": 1.594622041976216e-05,
98
+ "gpu": 0.006043184112321255,
99
+ "total": 0.0075917393769901025
100
+ },
101
+ "efficiency": {
102
+ "unit": "tokens/kWh",
103
+ "value": 1185499.0738062232
104
+ },
105
+ "measures": [
106
+ {
107
+ "unit": "kWh",
108
+ "cpu": 0.0017120611431651516,
109
+ "ram": 1.781507733712239e-05,
110
+ "gpu": 0.006835263801540092,
111
+ "total": 0.008565140022042382
112
+ },
113
+ {
114
+ "unit": "kWh",
115
+ "cpu": 0.0017026174863408717,
116
+ "ram": 1.771530782239789e-05,
117
+ "gpu": 0.006717194540418081,
118
+ "total": 0.00843752733458136
119
+ },
120
+ {
121
+ "unit": "kWh",
122
+ "cpu": 0.0016996438265546904,
123
+ "ram": 1.768379281359322e-05,
124
+ "gpu": 0.006708423977846101,
125
+ "total": 0.008425751597214388
126
+ },
127
+ {
128
+ "unit": "kWh",
129
+ "cpu": 0.0017036087327997728,
130
+ "ram": 1.772523439230838e-05,
131
+ "gpu": 0.00668814340606616,
132
+ "total": 0.00840947737325825
133
+ },
134
+ {
135
+ "unit": "kWh",
136
+ "cpu": -0.0007735792766898762,
137
+ "ram": -8.04650520994879e-06,
138
+ "gpu": -0.005336946213998051,
139
+ "total": -0.006118571995897874
140
+ },
141
+ {
142
+ "unit": "kWh",
143
+ "cpu": 0.0016993492261053469,
144
+ "ram": 1.7680599692580472e-05,
145
+ "gpu": 0.00669986813766843,
146
+ "total": 0.008416897963466338
147
+ },
148
+ {
149
+ "unit": "kWh",
150
+ "cpu": 0.0024794984861841347,
151
+ "ram": 2.5795559494294352e-05,
152
+ "gpu": 0.012036623518179912,
153
+ "total": 0.014541917563858364
154
+ },
155
+ {
156
+ "unit": "kWh",
157
+ "cpu": 0.0016971929005708168,
158
+ "ram": 1.7658742769242277e-05,
159
+ "gpu": 0.0066783995093817605,
160
+ "total": 0.008393251152721812
161
+ },
162
+ {
163
+ "unit": "kWh",
164
+ "cpu": 0.0017041481349138639,
165
+ "ram": 1.773071350101803e-05,
166
+ "gpu": 0.0067008664718002375,
167
+ "total": 0.008422745320215135
168
+ },
169
+ {
170
+ "unit": "kWh",
171
+ "cpu": 0.0017015497825460567,
172
+ "ram": 1.770368158501338e-05,
173
+ "gpu": 0.006704003974309813,
174
+ "total": 0.008423257438440863
175
+ }
176
+ ]
177
+ },
178
+ "per_token": {
179
+ "memory": null,
180
+ "latency": null,
181
+ "throughput": null,
182
+ "energy": null,
183
+ "efficiency": null,
184
+ "measures": null
185
+ },
186
+ "preprocess": {
187
+ "memory": null,
188
+ "latency": null,
189
+ "throughput": null,
190
+ "energy": {
191
+ "unit": "kWh",
192
+ "cpu": 1.1265942924405358e-05,
193
+ "ram": 8.491845032760329e-08,
194
+ "gpu": 1.888834844376852e-05,
195
+ "total": 3.023920981850148e-05
196
+ },
197
+ "efficiency": {
198
+ "unit": "samples/kWh",
199
+ "value": 33069647.189926323
200
+ },
201
+ "measures": null
202
+ }
203
+ }
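The aggregate `energy` block in reports like the one above appears to match the mean of the ten per-iteration `measures`, so it can be re-derived offline. A minimal sanity-check sketch, assuming a local checkout of this repo; the path and the outlier flagging are illustrative, not part of optimum-benchmark:

```python
# Sketch: recompute the aggregate decode energy of a benchmark_report.json
# as the mean of its per-iteration measures (path is an assumption).
import json
from statistics import mean

path = "runs/text_generation/a10g-large/EleutherAI/pythia-1.4b/2024-10-25-14-19-11/benchmark_report.json"
with open(path) as f:
    report = json.load(f)

decode = report["decode"]
print("reported total (kWh):  ", decode["energy"]["total"])
print("mean of measures (kWh):", mean(m["total"] for m in decode["measures"]))

# Some iterations carry non-positive or near-doubled totals (see
# decode_iteration_5 and decode_iteration_7 above); flag them for review.
suspect = [i for i, m in enumerate(decode["measures"], start=1)
           if m["total"] <= 0 or m["total"] > 1.5 * decode["energy"]["total"]]
print("suspect iterations:", suspect)
```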
runs/text_generation/a10g-large/EleutherAI/pythia-1.4b/2024-10-25-14-19-11/cli.log ADDED
@@ -0,0 +1,188 @@
1
+ [2024-10-25 14:19:14,194][launcher][INFO] - Allocating process launcher
2
+ [2024-10-25 14:19:14,194][process][INFO] - + Setting multiprocessing start method to spawn.
3
+ [2024-10-25 14:19:14,207][process][INFO] - + Launched benchmark in isolated process 351.
4
+ [PROC-0][2024-10-25 14:19:16,766][datasets][INFO] - PyTorch version 2.4.0 available.
5
+ [PROC-0][2024-10-25 14:19:17,741][backend][INFO] - Allocating pytorch backend
6
+ [PROC-0][2024-10-25 14:19:17,741][backend][INFO] - + Setting random seed to 42
7
+ [PROC-0][2024-10-25 14:19:18,360][pytorch][INFO] - + Using AutoModel class AutoModelForCausalLM
8
+ [PROC-0][2024-10-25 14:19:18,360][pytorch][INFO] - + Creating backend temporary directory
9
+ [PROC-0][2024-10-25 14:19:18,361][pytorch][INFO] - + Loading model with random weights
10
+ [PROC-0][2024-10-25 14:19:18,361][pytorch][INFO] - + Creating no weights model
11
+ [PROC-0][2024-10-25 14:19:18,361][pytorch][INFO] - + Creating no weights model directory
12
+ [PROC-0][2024-10-25 14:19:18,361][pytorch][INFO] - + Creating no weights model state dict
13
+ [PROC-0][2024-10-25 14:19:18,384][pytorch][INFO] - + Saving no weights model safetensors
14
+ [PROC-0][2024-10-25 14:19:18,384][pytorch][INFO] - + Saving no weights model pretrained config
15
+ [PROC-0][2024-10-25 14:19:18,385][pytorch][INFO] - + Loading no weights AutoModel
16
+ [PROC-0][2024-10-25 14:19:18,385][pytorch][INFO] - + Loading model directly on device: cuda
17
+ [PROC-0][2024-10-25 14:19:18,760][pytorch][INFO] - + Turning on model's eval mode
18
+ [PROC-0][2024-10-25 14:19:18,767][benchmark][INFO] - Allocating energy_star benchmark
19
+ [PROC-0][2024-10-25 14:19:18,767][energy_star][INFO] - + Loading raw dataset
20
+ [PROC-0][2024-10-25 14:19:20,814][energy_star][INFO] - + Updating Text Generation kwargs with default values
21
+ [PROC-0][2024-10-25 14:19:20,814][energy_star][INFO] - + Initializing Text Generation report
22
+ [PROC-0][2024-10-25 14:19:20,814][energy][INFO] - + Tracking GPU energy on devices [0]
23
+ [PROC-0][2024-10-25 14:19:25,041][energy_star][INFO] - + Preprocessing dataset
24
+ [PROC-0][2024-10-25 14:19:25,997][energy][INFO] - + Saving codecarbon emission data to preprocess_codecarbon.json
25
+ [PROC-0][2024-10-25 14:19:25,998][energy_star][INFO] - + Preparing backend for Inference
26
+ [PROC-0][2024-10-25 14:19:25,998][energy_star][INFO] - + Initialising dataloader
27
+ [PROC-0][2024-10-25 14:19:25,998][energy_star][INFO] - + Warming up backend for Inference
28
+ [PROC-0][2024-10-25 14:19:27,541][energy_star][INFO] - + Additional warmup for Text Generation
29
+ [PROC-0][2024-10-25 14:19:27,742][energy_star][INFO] - + Running Text Generation energy tracking for 10 iterations
30
+ [PROC-0][2024-10-25 14:19:27,743][energy_star][INFO] - + Prefill iteration 1/10
31
+ [PROC-0][2024-10-25 14:20:32,467][energy][INFO] - + Saving codecarbon emission data to prefill_codecarbon.json
32
+ [PROC-0][2024-10-25 14:20:32,467][energy_star][INFO] - + Prefill iteration 2/10
33
+ [PROC-0][2024-10-25 14:21:37,682][energy][INFO] - + Saving codecarbon emission data to prefill_codecarbon.json
34
+ [PROC-0][2024-10-25 14:21:37,682][energy_star][INFO] - + Prefill iteration 3/10
35
+ [PROC-0][2024-10-25 14:22:43,182][energy][INFO] - + Saving codecarbon emission data to prefill_codecarbon.json
36
+ [PROC-0][2024-10-25 14:22:43,183][energy_star][INFO] - + Prefill iteration 4/10
37
+ [PROC-0][2024-10-25 14:23:48,636][energy][INFO] - + Saving codecarbon emission data to prefill_codecarbon.json
38
+ [PROC-0][2024-10-25 14:23:48,636][energy_star][INFO] - + Prefill iteration 5/10
39
+ [PROC-0][2024-10-25 14:24:54,164][energy][INFO] - + Saving codecarbon emission data to prefill_codecarbon.json
40
+ [PROC-0][2024-10-25 14:24:54,164][energy_star][INFO] - + Prefill iteration 6/10
41
+ [PROC-0][2024-10-25 14:25:59,719][energy][INFO] - + Saving codecarbon emission data to prefill_codecarbon.json
42
+ [PROC-0][2024-10-25 14:25:59,720][energy_star][INFO] - + Prefill iteration 7/10
43
+ [PROC-0][2024-10-25 14:27:05,283][energy][INFO] - + Saving codecarbon emission data to prefill_codecarbon.json
44
+ [PROC-0][2024-10-25 14:27:05,283][energy_star][INFO] - + Prefill iteration 8/10
45
+ [PROC-0][2024-10-25 14:28:10,852][energy][INFO] - + Saving codecarbon emission data to prefill_codecarbon.json
46
+ [PROC-0][2024-10-25 14:28:10,852][energy_star][INFO] - + Prefill iteration 9/10
47
+ [PROC-0][2024-10-25 14:29:16,384][energy][INFO] - + Saving codecarbon emission data to prefill_codecarbon.json
48
+ [PROC-0][2024-10-25 14:29:16,384][energy_star][INFO] - + Prefill iteration 10/10
49
+ [PROC-0][2024-10-25 14:30:22,042][energy][INFO] - + Saving codecarbon emission data to prefill_codecarbon.json
50
+ [PROC-0][2024-10-25 14:30:22,043][energy_star][INFO] - + Decoding iteration 1/10
51
+ [PROC-0][2024-10-25 14:33:51,789][energy][INFO] - + Saving codecarbon emission data to generate_codecarbon.json
52
+ [PROC-0][2024-10-25 14:33:51,789][energy_star][INFO] - + Decoding iteration 2/10
53
+ [PROC-0][2024-10-25 14:37:21,225][energy][INFO] - + Saving codecarbon emission data to generate_codecarbon.json
54
+ [PROC-0][2024-10-25 14:37:21,226][energy_star][INFO] - + Decoding iteration 3/10
55
+ [PROC-0][2024-10-25 14:40:50,695][energy][INFO] - + Saving codecarbon emission data to generate_codecarbon.json
56
+ [PROC-0][2024-10-25 14:40:50,696][energy_star][INFO] - + Decoding iteration 4/10
57
+ [PROC-0][2024-10-25 14:44:20,455][energy][INFO] - + Saving codecarbon emission data to generate_codecarbon.json
58
+ [PROC-0][2024-10-25 14:44:20,455][energy_star][INFO] - + Decoding iteration 5/10
59
+ [PROC-0][2024-10-25 14:47:50,217][energy][INFO] - + Saving codecarbon emission data to generate_codecarbon.json
60
+ [PROC-0][2024-10-25 14:47:50,218][energy_star][INFO] - + Decoding iteration 6/10
61
+ [PROC-0][2024-10-25 14:51:19,718][energy][INFO] - + Saving codecarbon emission data to generate_codecarbon.json
62
+ [PROC-0][2024-10-25 14:51:19,718][energy_star][INFO] - + Decoding iteration 7/10
63
+ [PROC-0][2024-10-25 14:54:49,747][energy][INFO] - + Saving codecarbon emission data to generate_codecarbon.json
64
+ [PROC-0][2024-10-25 14:54:49,747][energy_star][INFO] - + Decoding iteration 8/10
65
+ [PROC-0][2024-10-25 14:58:19,078][energy][INFO] - + Saving codecarbon emission data to generate_codecarbon.json
66
+ [PROC-0][2024-10-25 14:58:19,079][energy_star][INFO] - + Decoding iteration 9/10
67
+ [PROC-0][2024-10-25 15:01:48,962][energy][INFO] - + Saving codecarbon emission data to generate_codecarbon.json
68
+ [PROC-0][2024-10-25 15:01:48,963][energy_star][INFO] - + Decoding iteration 10/10
69
+ [PROC-0][2024-10-25 15:05:18,752][energy][INFO] - + Saving codecarbon emission data to generate_codecarbon.json
70
+ [PROC-0][2024-10-25 15:05:18,753][energy][INFO] - + prefill energy consumption:
71
+ [PROC-0][2024-10-25 15:05:18,753][energy][INFO] - + CPU: 0.000695 (kWh)
72
+ [PROC-0][2024-10-25 15:05:18,753][energy][INFO] - + GPU: 0.004784 (kWh)
73
+ [PROC-0][2024-10-25 15:05:18,753][energy][INFO] - + RAM: 0.000007 (kWh)
74
+ [PROC-0][2024-10-25 15:05:18,753][energy][INFO] - + total: 0.005486 (kWh)
75
+ [PROC-0][2024-10-25 15:05:18,753][energy][INFO] - + prefill_iteration_1 energy consumption:
76
+ [PROC-0][2024-10-25 15:05:18,753][energy][INFO] - + CPU: 0.000764 (kWh)
77
+ [PROC-0][2024-10-25 15:05:18,753][energy][INFO] - + GPU: 0.005254 (kWh)
78
+ [PROC-0][2024-10-25 15:05:18,753][energy][INFO] - + RAM: 0.000008 (kWh)
79
+ [PROC-0][2024-10-25 15:05:18,753][energy][INFO] - + total: 0.006026 (kWh)
80
+ [PROC-0][2024-10-25 15:05:18,753][energy][INFO] - + prefill_iteration_2 energy consumption:
81
+ [PROC-0][2024-10-25 15:05:18,753][energy][INFO] - + CPU: 0.000770 (kWh)
82
+ [PROC-0][2024-10-25 15:05:18,754][energy][INFO] - + GPU: 0.005302 (kWh)
83
+ [PROC-0][2024-10-25 15:05:18,754][energy][INFO] - + RAM: 0.000008 (kWh)
84
+ [PROC-0][2024-10-25 15:05:18,754][energy][INFO] - + total: 0.006080 (kWh)
85
+ [PROC-0][2024-10-25 15:05:18,754][energy][INFO] - + prefill_iteration_3 energy consumption:
86
+ [PROC-0][2024-10-25 15:05:18,754][energy][INFO] - + CPU: 0.000773 (kWh)
87
+ [PROC-0][2024-10-25 15:05:18,754][energy][INFO] - + GPU: 0.005305 (kWh)
88
+ [PROC-0][2024-10-25 15:05:18,754][energy][INFO] - + RAM: 0.000008 (kWh)
89
+ [PROC-0][2024-10-25 15:05:18,754][energy][INFO] - + total: 0.006087 (kWh)
90
+ [PROC-0][2024-10-25 15:05:18,754][energy][INFO] - + prefill_iteration_4 energy consumption:
91
+ [PROC-0][2024-10-25 15:05:18,754][energy][INFO] - + CPU: 0.000773 (kWh)
92
+ [PROC-0][2024-10-25 15:05:18,754][energy][INFO] - + GPU: 0.005334 (kWh)
93
+ [PROC-0][2024-10-25 15:05:18,755][energy][INFO] - + RAM: 0.000008 (kWh)
94
+ [PROC-0][2024-10-25 15:05:18,755][energy][INFO] - + total: 0.006115 (kWh)
95
+ [PROC-0][2024-10-25 15:05:18,755][energy][INFO] - + prefill_iteration_5 energy consumption:
96
+ [PROC-0][2024-10-25 15:05:18,755][energy][INFO] - + CPU: 0.000774 (kWh)
97
+ [PROC-0][2024-10-25 15:05:18,755][energy][INFO] - + GPU: 0.005337 (kWh)
98
+ [PROC-0][2024-10-25 15:05:18,755][energy][INFO] - + RAM: 0.000008 (kWh)
99
+ [PROC-0][2024-10-25 15:05:18,755][energy][INFO] - + total: 0.006119 (kWh)
100
+ [PROC-0][2024-10-25 15:05:18,755][energy][INFO] - + prefill_iteration_6 energy consumption:
101
+ [PROC-0][2024-10-25 15:05:18,755][energy][INFO] - + CPU: 0.000774 (kWh)
102
+ [PROC-0][2024-10-25 15:05:18,755][energy][INFO] - + GPU: 0.005316 (kWh)
103
+ [PROC-0][2024-10-25 15:05:18,755][energy][INFO] - + RAM: 0.000008 (kWh)
104
+ [PROC-0][2024-10-25 15:05:18,755][energy][INFO] - + total: 0.006098 (kWh)
105
+ [PROC-0][2024-10-25 15:05:18,756][energy][INFO] - + prefill_iteration_7 energy consumption:
106
+ [PROC-0][2024-10-25 15:05:18,756][energy][INFO] - + CPU: 0.000000 (kWh)
107
+ [PROC-0][2024-10-25 15:05:18,756][energy][INFO] - + GPU: 0.000000 (kWh)
108
+ [PROC-0][2024-10-25 15:05:18,756][energy][INFO] - + RAM: 0.000000 (kWh)
109
+ [PROC-0][2024-10-25 15:05:18,756][energy][INFO] - + total: 0.000000 (kWh)
110
+ [PROC-0][2024-10-25 15:05:18,756][energy][INFO] - + prefill_iteration_8 energy consumption:
111
+ [PROC-0][2024-10-25 15:05:18,756][energy][INFO] - + CPU: 0.000774 (kWh)
112
+ [PROC-0][2024-10-25 15:05:18,756][energy][INFO] - + GPU: 0.005337 (kWh)
113
+ [PROC-0][2024-10-25 15:05:18,756][energy][INFO] - + RAM: 0.000008 (kWh)
114
+ [PROC-0][2024-10-25 15:05:18,756][energy][INFO] - + total: 0.006119 (kWh)
115
+ [PROC-0][2024-10-25 15:05:18,756][energy][INFO] - + prefill_iteration_9 energy consumption:
116
+ [PROC-0][2024-10-25 15:05:18,756][energy][INFO] - + CPU: 0.000774 (kWh)
117
+ [PROC-0][2024-10-25 15:05:18,757][energy][INFO] - + GPU: 0.005326 (kWh)
118
+ [PROC-0][2024-10-25 15:05:18,757][energy][INFO] - + RAM: 0.000008 (kWh)
119
+ [PROC-0][2024-10-25 15:05:18,757][energy][INFO] - + total: 0.006107 (kWh)
120
+ [PROC-0][2024-10-25 15:05:18,757][energy][INFO] - + prefill_iteration_10 energy consumption:
121
+ [PROC-0][2024-10-25 15:05:18,757][energy][INFO] - + CPU: 0.000775 (kWh)
122
+ [PROC-0][2024-10-25 15:05:18,757][energy][INFO] - + GPU: 0.005328 (kWh)
123
+ [PROC-0][2024-10-25 15:05:18,757][energy][INFO] - + RAM: 0.000008 (kWh)
124
+ [PROC-0][2024-10-25 15:05:18,757][energy][INFO] - + total: 0.006111 (kWh)
125
+ [PROC-0][2024-10-25 15:05:18,757][energy][INFO] - + decode energy consumption:
126
+ [PROC-0][2024-10-25 15:05:18,757][energy][INFO] - + CPU: 0.001533 (kWh)
127
+ [PROC-0][2024-10-25 15:05:18,757][energy][INFO] - + GPU: 0.006043 (kWh)
128
+ [PROC-0][2024-10-25 15:05:18,757][energy][INFO] - + RAM: 0.000016 (kWh)
129
+ [PROC-0][2024-10-25 15:05:18,758][energy][INFO] - + total: 0.007592 (kWh)
130
+ [PROC-0][2024-10-25 15:05:18,758][energy][INFO] - + decode_iteration_1 energy consumption:
131
+ [PROC-0][2024-10-25 15:05:18,758][energy][INFO] - + CPU: 0.001712 (kWh)
132
+ [PROC-0][2024-10-25 15:05:18,758][energy][INFO] - + GPU: 0.006835 (kWh)
133
+ [PROC-0][2024-10-25 15:05:18,758][energy][INFO] - + RAM: 0.000018 (kWh)
134
+ [PROC-0][2024-10-25 15:05:18,758][energy][INFO] - + total: 0.008565 (kWh)
135
+ [PROC-0][2024-10-25 15:05:18,758][energy][INFO] - + decode_iteration_2 energy consumption:
136
+ [PROC-0][2024-10-25 15:05:18,758][energy][INFO] - + CPU: 0.001703 (kWh)
137
+ [PROC-0][2024-10-25 15:05:18,758][energy][INFO] - + GPU: 0.006717 (kWh)
138
+ [PROC-0][2024-10-25 15:05:18,758][energy][INFO] - + RAM: 0.000018 (kWh)
139
+ [PROC-0][2024-10-25 15:05:18,758][energy][INFO] - + total: 0.008438 (kWh)
140
+ [PROC-0][2024-10-25 15:05:18,759][energy][INFO] - + decode_iteration_3 energy consumption:
141
+ [PROC-0][2024-10-25 15:05:18,759][energy][INFO] - + CPU: 0.001700 (kWh)
142
+ [PROC-0][2024-10-25 15:05:18,759][energy][INFO] - + GPU: 0.006708 (kWh)
143
+ [PROC-0][2024-10-25 15:05:18,759][energy][INFO] - + RAM: 0.000018 (kWh)
144
+ [PROC-0][2024-10-25 15:05:18,759][energy][INFO] - + total: 0.008426 (kWh)
145
+ [PROC-0][2024-10-25 15:05:18,759][energy][INFO] - + decode_iteration_4 energy consumption:
146
+ [PROC-0][2024-10-25 15:05:18,759][energy][INFO] - + CPU: 0.001704 (kWh)
147
+ [PROC-0][2024-10-25 15:05:18,759][energy][INFO] - + GPU: 0.006688 (kWh)
148
+ [PROC-0][2024-10-25 15:05:18,759][energy][INFO] - + RAM: 0.000018 (kWh)
149
+ [PROC-0][2024-10-25 15:05:18,759][energy][INFO] - + total: 0.008409 (kWh)
150
+ [PROC-0][2024-10-25 15:05:18,759][energy][INFO] - + decode_iteration_5 energy consumption:
151
+ [PROC-0][2024-10-25 15:05:18,759][energy][INFO] - + CPU: -0.000774 (kWh)
152
+ [PROC-0][2024-10-25 15:05:18,760][energy][INFO] - + GPU: -0.005337 (kWh)
153
+ [PROC-0][2024-10-25 15:05:18,760][energy][INFO] - + RAM: -0.000008 (kWh)
154
+ [PROC-0][2024-10-25 15:05:18,760][energy][INFO] - + total: -0.006119 (kWh)
155
+ [PROC-0][2024-10-25 15:05:18,760][energy][INFO] - + decode_iteration_6 energy consumption:
156
+ [PROC-0][2024-10-25 15:05:18,760][energy][INFO] - + CPU: 0.001699 (kWh)
157
+ [PROC-0][2024-10-25 15:05:18,760][energy][INFO] - + GPU: 0.006700 (kWh)
158
+ [PROC-0][2024-10-25 15:05:18,760][energy][INFO] - + RAM: 0.000018 (kWh)
159
+ [PROC-0][2024-10-25 15:05:18,760][energy][INFO] - + total: 0.008417 (kWh)
160
+ [PROC-0][2024-10-25 15:05:18,760][energy][INFO] - + decode_iteration_7 energy consumption:
161
+ [PROC-0][2024-10-25 15:05:18,760][energy][INFO] - + CPU: 0.002479 (kWh)
162
+ [PROC-0][2024-10-25 15:05:18,760][energy][INFO] - + GPU: 0.012037 (kWh)
163
+ [PROC-0][2024-10-25 15:05:18,760][energy][INFO] - + RAM: 0.000026 (kWh)
164
+ [PROC-0][2024-10-25 15:05:18,761][energy][INFO] - + total: 0.014542 (kWh)
165
+ [PROC-0][2024-10-25 15:05:18,761][energy][INFO] - + decode_iteration_8 energy consumption:
166
+ [PROC-0][2024-10-25 15:05:18,761][energy][INFO] - + CPU: 0.001697 (kWh)
167
+ [PROC-0][2024-10-25 15:05:18,761][energy][INFO] - + GPU: 0.006678 (kWh)
168
+ [PROC-0][2024-10-25 15:05:18,761][energy][INFO] - + RAM: 0.000018 (kWh)
169
+ [PROC-0][2024-10-25 15:05:18,761][energy][INFO] - + total: 0.008393 (kWh)
170
+ [PROC-0][2024-10-25 15:05:18,761][energy][INFO] - + decode_iteration_9 energy consumption:
171
+ [PROC-0][2024-10-25 15:05:18,761][energy][INFO] - + CPU: 0.001704 (kWh)
172
+ [PROC-0][2024-10-25 15:05:18,761][energy][INFO] - + GPU: 0.006701 (kWh)
173
+ [PROC-0][2024-10-25 15:05:18,761][energy][INFO] - + RAM: 0.000018 (kWh)
174
+ [PROC-0][2024-10-25 15:05:18,761][energy][INFO] - + total: 0.008423 (kWh)
175
+ [PROC-0][2024-10-25 15:05:18,761][energy][INFO] - + decode_iteration_10 energy consumption:
176
+ [PROC-0][2024-10-25 15:05:18,761][energy][INFO] - + CPU: 0.001702 (kWh)
177
+ [PROC-0][2024-10-25 15:05:18,762][energy][INFO] - + GPU: 0.006704 (kWh)
178
+ [PROC-0][2024-10-25 15:05:18,762][energy][INFO] - + RAM: 0.000018 (kWh)
179
+ [PROC-0][2024-10-25 15:05:18,762][energy][INFO] - + total: 0.008423 (kWh)
180
+ [PROC-0][2024-10-25 15:05:18,762][energy][INFO] - + preprocess energy consumption:
181
+ [PROC-0][2024-10-25 15:05:18,762][energy][INFO] - + CPU: 0.000011 (kWh)
182
+ [PROC-0][2024-10-25 15:05:18,762][energy][INFO] - + GPU: 0.000019 (kWh)
183
+ [PROC-0][2024-10-25 15:05:18,762][energy][INFO] - + RAM: 0.000000 (kWh)
184
+ [PROC-0][2024-10-25 15:05:18,762][energy][INFO] - + total: 0.000030 (kWh)
185
+ [PROC-0][2024-10-25 15:05:18,763][energy][INFO] - + prefill energy efficiency: 54946841.777042 (tokens/kWh)
186
+ [PROC-0][2024-10-25 15:05:18,763][energy][INFO] - + decode energy efficiency: 1185499.073806 (tokens/kWh)
187
+ [PROC-0][2024-10-25 15:05:18,763][energy][INFO] - + preprocess energy efficiency: 33069647.189926 (samples/kWh)
188
+ [2024-10-25 15:05:19,487][datasets][INFO] - PyTorch version 2.4.0 available.
runs/text_generation/a10g-large/EleutherAI/pythia-1.4b/2024-10-25-14-19-11/error.log ADDED
The diff for this file is too large to render. See raw diff
 
runs/text_generation/a10g-large/EleutherAI/pythia-1.4b/2024-10-25-14-19-11/experiment_config.json ADDED
@@ -0,0 +1,110 @@
1
+ {
2
+ "experiment_name": "text_generation",
3
+ "backend": {
4
+ "name": "pytorch",
5
+ "version": "2.4.0",
6
+ "_target_": "optimum_benchmark.backends.pytorch.backend.PyTorchBackend",
7
+ "task": "text-generation",
8
+ "model": "EleutherAI/pythia-1.4b",
9
+ "processor": "EleutherAI/pythia-1.4b",
10
+ "library": "transformers",
11
+ "device": "cuda",
12
+ "device_ids": "0",
13
+ "seed": 42,
14
+ "inter_op_num_threads": null,
15
+ "intra_op_num_threads": null,
16
+ "hub_kwargs": {
17
+ "revision": "main",
18
+ "force_download": false,
19
+ "local_files_only": false,
20
+ "trust_remote_code": true
21
+ },
22
+ "no_weights": true,
23
+ "device_map": null,
24
+ "torch_dtype": null,
25
+ "amp_autocast": false,
26
+ "amp_dtype": null,
27
+ "eval_mode": true,
28
+ "to_bettertransformer": false,
29
+ "low_cpu_mem_usage": null,
30
+ "attn_implementation": null,
31
+ "cache_implementation": null,
32
+ "torch_compile": false,
33
+ "torch_compile_config": {},
34
+ "quantization_scheme": null,
35
+ "quantization_config": {},
36
+ "deepspeed_inference": false,
37
+ "deepspeed_inference_config": {},
38
+ "peft_type": null,
39
+ "peft_config": {}
40
+ },
41
+ "launcher": {
42
+ "name": "process",
43
+ "_target_": "optimum_benchmark.launchers.process.launcher.ProcessLauncher",
44
+ "device_isolation": false,
45
+ "device_isolation_action": "warn",
46
+ "start_method": "spawn"
47
+ },
48
+ "benchmark": {
49
+ "name": "energy_star",
50
+ "_target_": "optimum_benchmark.benchmarks.energy_star.benchmark.EnergyStarBenchmark",
51
+ "dataset_name": "EnergyStarAI/text_generation",
52
+ "dataset_config": "",
53
+ "dataset_split": "train",
54
+ "num_samples": 1000,
55
+ "input_shapes": {
56
+ "batch_size": 1
57
+ },
58
+ "text_column_name": "text",
59
+ "truncation": true,
60
+ "max_length": -1,
61
+ "dataset_prefix1": "",
62
+ "dataset_prefix2": "",
63
+ "t5_task": "",
64
+ "image_column_name": "image",
65
+ "resize": false,
66
+ "question_column_name": "question",
67
+ "context_column_name": "context",
68
+ "sentence1_column_name": "sentence1",
69
+ "sentence2_column_name": "sentence2",
70
+ "audio_column_name": "audio",
71
+ "iterations": 10,
72
+ "warmup_runs": 10,
73
+ "energy": true,
74
+ "forward_kwargs": {},
75
+ "generate_kwargs": {
76
+ "max_new_tokens": 10,
77
+ "min_new_tokens": 10
78
+ },
79
+ "call_kwargs": {}
80
+ },
81
+ "environment": {
82
+ "cpu": " AMD EPYC 7R32",
83
+ "cpu_count": 48,
84
+ "cpu_ram_mb": 200472.73984,
85
+ "system": "Linux",
86
+ "machine": "x86_64",
87
+ "platform": "Linux-5.10.192-183.736.amzn2.x86_64-x86_64-with-glibc2.35",
88
+ "processor": "x86_64",
89
+ "python_version": "3.9.20",
90
+ "gpu": [
91
+ "NVIDIA A10G"
92
+ ],
93
+ "gpu_count": 1,
94
+ "gpu_vram_mb": 24146608128,
95
+ "optimum_benchmark_version": "0.2.0",
96
+ "optimum_benchmark_commit": null,
97
+ "transformers_version": "4.44.0",
98
+ "transformers_commit": null,
99
+ "accelerate_version": "0.33.0",
100
+ "accelerate_commit": null,
101
+ "diffusers_version": "0.30.0",
102
+ "diffusers_commit": null,
103
+ "optimum_version": null,
104
+ "optimum_commit": null,
105
+ "timm_version": null,
106
+ "timm_commit": null,
107
+ "peft_version": null,
108
+ "peft_commit": null
109
+ }
110
+ }
runs/text_generation/a10g-large/EleutherAI/pythia-1.4b/2024-10-25-14-19-11/generate_codecarbon.json ADDED
@@ -0,0 +1,33 @@
+ {
+ "timestamp": "2024-10-25T15:05:18",
+ "project_name": "codecarbon",
+ "run_id": "8248080f-4663-456d-a137-5e9a7085d7e8",
+ "duration": -1729590786.5050704,
+ "emissions": 0.005365198897281848,
+ "emissions_rate": 2.557515390694824e-05,
+ "cpu_power": 42.5,
+ "gpu_power": 206.47512668371553,
+ "ram_power": 0.442159652709961,
+ "cpu_energy": 0.0024766705044626906,
+ "gpu_energy": 0.012032104625675721,
+ "ram_energy": 2.576626895579409e-05,
+ "energy_consumed": 0.014534541399094186,
+ "country_name": "United States",
+ "country_iso_code": "USA",
+ "region": "virginia",
+ "cloud_provider": "",
+ "cloud_region": "",
+ "os": "Linux-5.10.192-183.736.amzn2.x86_64-x86_64-with-glibc2.35",
+ "python_version": "3.9.20",
+ "codecarbon_version": "2.5.1",
+ "cpu_count": 48,
+ "cpu_model": "AMD EPYC 7R32",
+ "gpu_count": 1,
+ "gpu_model": "1 x NVIDIA A10G",
+ "longitude": -77.4903,
+ "latitude": 39.0469,
+ "ram_total_size": 186.7047882080078,
+ "tracking_mode": "process",
+ "on_cloud": "N",
+ "pue": 1.0
+ }
runs/text_generation/a10g-large/EleutherAI/pythia-1.4b/2024-10-25-14-19-11/prefill_codecarbon.json ADDED
@@ -0,0 +1,33 @@
+ {
+ "timestamp": "2024-10-25T14:30:22",
+ "project_name": "codecarbon",
+ "run_id": "8248080f-4663-456d-a137-5e9a7085d7e8",
+ "duration": -1729590930.6361673,
+ "emissions": 0.002255885002929423,
+ "emissions_rate": 3.436201095544714e-05,
+ "cpu_power": 42.5,
+ "gpu_power": 292.1524531813292,
+ "ram_power": 0.4420952796936035,
+ "cpu_energy": 0.0007751207219166339,
+ "gpu_energy": 0.005328100651365908,
+ "ram_energy": 8.062587370780708e-06,
+ "energy_consumed": 0.006111283960653323,
+ "country_name": "United States",
+ "country_iso_code": "USA",
+ "region": "virginia",
+ "cloud_provider": "",
+ "cloud_region": "",
+ "os": "Linux-5.10.192-183.736.amzn2.x86_64-x86_64-with-glibc2.35",
+ "python_version": "3.9.20",
+ "codecarbon_version": "2.5.1",
+ "cpu_count": 48,
+ "cpu_model": "AMD EPYC 7R32",
+ "gpu_count": 1,
+ "gpu_model": "1 x NVIDIA A10G",
+ "longitude": -77.4903,
+ "latitude": 39.0469,
+ "ram_total_size": 186.7047882080078,
+ "tracking_mode": "process",
+ "on_cloud": "N",
+ "pue": 1.0
+ }
runs/text_generation/a10g-large/EleutherAI/pythia-1.4b/2024-10-25-14-19-11/preprocess_codecarbon.json ADDED
@@ -0,0 +1,33 @@
+ {
+ "timestamp": "2024-10-25T14:19:25",
+ "project_name": "codecarbon",
+ "run_id": "8248080f-4663-456d-a137-5e9a7085d7e8",
+ "duration": -1729590995.334638,
+ "emissions": 1.11623319042601e-05,
+ "emissions_rate": 1.1722706007048627e-05,
+ "cpu_power": 42.5,
+ "gpu_power": 71.42300504715094,
+ "ram_power": 0.32139015197753906,
+ "cpu_energy": 1.1265942924405358e-05,
+ "gpu_energy": 1.888834844376852e-05,
+ "ram_energy": 8.491845032760329e-08,
+ "energy_consumed": 3.023920981850148e-05,
+ "country_name": "United States",
+ "country_iso_code": "USA",
+ "region": "virginia",
+ "cloud_provider": "",
+ "cloud_region": "",
+ "os": "Linux-5.10.192-183.736.amzn2.x86_64-x86_64-with-glibc2.35",
+ "python_version": "3.9.20",
+ "codecarbon_version": "2.5.1",
+ "cpu_count": 48,
+ "cpu_model": "AMD EPYC 7R32",
+ "gpu_count": 1,
+ "gpu_model": "1 x NVIDIA A10G",
+ "longitude": -77.4903,
+ "latitude": 39.0469,
+ "ram_total_size": 186.7047882080078,
+ "tracking_mode": "process",
+ "on_cloud": "N",
+ "pue": 1.0
+ }
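The three *_codecarbon.json files above are the raw CodeCarbon records behind the energy numbers in the report and the cli.log. A small summary helper, sketched under the assumption that the run directory is checked out locally (CodeCarbon reports energy in kWh and emissions in kg CO2-eq):

```python
# Sketch: summarise the per-phase CodeCarbon records of one run
# (hypothetical helper; run_dir and the phase names are taken from the
# files added in this commit).
import json
from pathlib import Path

run_dir = Path("runs/text_generation/a10g-large/EleutherAI/pythia-1.4b/2024-10-25-14-19-11")

for phase in ("preprocess", "prefill", "generate"):
    data = json.loads((run_dir / f"{phase}_codecarbon.json").read_text())
    print(f"{phase:>10}: {data['energy_consumed']:.6f} kWh "
          f"(cpu {data['cpu_energy']:.6f}, gpu {data['gpu_energy']:.6f}, ram {data['ram_energy']:.6f}) "
          f"emissions {data['emissions']:.6f} kgCO2eq")
```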
runs/text_generation/a10g-large/HuggingFaceTB/.DS_Store ADDED
Binary file (6.15 kB).
 
runs/text_generation/a10g-large/HuggingFaceTB/SmolLM-1.7B/2024-10-24-15-16-02/.hydra/config.yaml ADDED
@@ -0,0 +1,96 @@
1
+ backend:
2
+ name: pytorch
3
+ version: 2.4.0
4
+ _target_: optimum_benchmark.backends.pytorch.backend.PyTorchBackend
5
+ task: text-generation
6
+ model: HuggingFaceTB/SmolLM-1.7B
7
+ processor: HuggingFaceTB/SmolLM-1.7B
8
+ library: null
9
+ device: cuda
10
+ device_ids: '0'
11
+ seed: 42
12
+ inter_op_num_threads: null
13
+ intra_op_num_threads: null
14
+ hub_kwargs: {}
15
+ no_weights: true
16
+ device_map: null
17
+ torch_dtype: null
18
+ amp_autocast: false
19
+ amp_dtype: null
20
+ eval_mode: true
21
+ to_bettertransformer: false
22
+ low_cpu_mem_usage: null
23
+ attn_implementation: null
24
+ cache_implementation: null
25
+ torch_compile: false
26
+ torch_compile_config: {}
27
+ quantization_scheme: null
28
+ quantization_config: {}
29
+ deepspeed_inference: false
30
+ deepspeed_inference_config: {}
31
+ peft_type: null
32
+ peft_config: {}
33
+ launcher:
34
+ name: process
35
+ _target_: optimum_benchmark.launchers.process.launcher.ProcessLauncher
36
+ device_isolation: false
37
+ device_isolation_action: warn
38
+ start_method: spawn
39
+ benchmark:
40
+ name: energy_star
41
+ _target_: optimum_benchmark.benchmarks.energy_star.benchmark.EnergyStarBenchmark
42
+ dataset_name: EnergyStarAI/text_generation
43
+ dataset_config: ''
44
+ dataset_split: train
45
+ num_samples: 1000
46
+ input_shapes:
47
+ batch_size: 1
48
+ text_column_name: text
49
+ truncation: true
50
+ max_length: -1
51
+ dataset_prefix1: ''
52
+ dataset_prefix2: ''
53
+ t5_task: ''
54
+ image_column_name: image
55
+ resize: false
56
+ question_column_name: question
57
+ context_column_name: context
58
+ sentence1_column_name: sentence1
59
+ sentence2_column_name: sentence2
60
+ audio_column_name: audio
61
+ iterations: 10
62
+ warmup_runs: 10
63
+ energy: true
64
+ forward_kwargs: {}
65
+ generate_kwargs:
66
+ max_new_tokens: 10
67
+ min_new_tokens: 10
68
+ call_kwargs: {}
69
+ experiment_name: text_generation
70
+ environment:
71
+ cpu: ' AMD EPYC 7R32'
72
+ cpu_count: 48
73
+ cpu_ram_mb: 200472.73984
74
+ system: Linux
75
+ machine: x86_64
76
+ platform: Linux-5.10.192-183.736.amzn2.x86_64-x86_64-with-glibc2.35
77
+ processor: x86_64
78
+ python_version: 3.9.20
79
+ gpu:
80
+ - NVIDIA A10G
81
+ gpu_count: 1
82
+ gpu_vram_mb: 24146608128
83
+ optimum_benchmark_version: 0.2.0
84
+ optimum_benchmark_commit: null
85
+ transformers_version: 4.44.0
86
+ transformers_commit: null
87
+ accelerate_version: 0.33.0
88
+ accelerate_commit: null
89
+ diffusers_version: 0.30.0
90
+ diffusers_commit: null
91
+ optimum_version: null
92
+ optimum_commit: null
93
+ timm_version: null
94
+ timm_commit: null
95
+ peft_version: null
96
+ peft_commit: null
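The resolved Hydra config above fixes the workload that the prefill and decode efficiencies are measured against (1000 samples, 10 iterations, 10 new tokens per sample). A short sketch for pulling those settings back out of a run directory, assuming PyYAML is available; the path is an assumption:

```python
# Sketch: reload a run's resolved Hydra config and print the knobs that set
# the workload size (field names come from the config.yaml above).
import yaml

cfg_path = "runs/text_generation/a10g-large/HuggingFaceTB/SmolLM-1.7B/2024-10-24-15-16-02/.hydra/config.yaml"
with open(cfg_path) as f:
    cfg = yaml.safe_load(f)

bench = cfg["benchmark"]
print("model:          ", cfg["backend"]["model"])
print("num_samples:    ", bench["num_samples"])
print("iterations:     ", bench["iterations"])
print("max_new_tokens: ", bench["generate_kwargs"]["max_new_tokens"])
```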
runs/text_generation/a10g-large/HuggingFaceTB/SmolLM-1.7B/2024-10-24-15-16-02/.hydra/hydra.yaml ADDED
@@ -0,0 +1,175 @@
1
+ hydra:
2
+ run:
3
+ dir: ./runs/text_generation/HuggingFaceTB/SmolLM-1.7B/2024-10-24-15-16-02
4
+ sweep:
5
+ dir: sweeps/${experiment_name}/${backend.model}/${now:%Y-%m-%d-%H-%M-%S}
6
+ subdir: ${hydra.job.num}
7
+ launcher:
8
+ _target_: hydra._internal.core_plugins.basic_launcher.BasicLauncher
9
+ sweeper:
10
+ _target_: hydra._internal.core_plugins.basic_sweeper.BasicSweeper
11
+ max_batch_size: null
12
+ params: null
13
+ help:
14
+ app_name: ${hydra.job.name}
15
+ header: '${hydra.help.app_name} is powered by Hydra.
16
+
17
+ '
18
+ footer: 'Powered by Hydra (https://hydra.cc)
19
+
20
+ Use --hydra-help to view Hydra specific help
21
+
22
+ '
23
+ template: '${hydra.help.header}
24
+
25
+ == Configuration groups ==
26
+
27
+ Compose your configuration from those groups (group=option)
28
+
29
+
30
+ $APP_CONFIG_GROUPS
31
+
32
+
33
+ == Config ==
34
+
35
+ Override anything in the config (foo.bar=value)
36
+
37
+
38
+ $CONFIG
39
+
40
+
41
+ ${hydra.help.footer}
42
+
43
+ '
44
+ hydra_help:
45
+ template: 'Hydra (${hydra.runtime.version})
46
+
47
+ See https://hydra.cc for more info.
48
+
49
+
50
+ == Flags ==
51
+
52
+ $FLAGS_HELP
53
+
54
+
55
+ == Configuration groups ==
56
+
57
+ Compose your configuration from those groups (For example, append hydra/job_logging=disabled
58
+ to command line)
59
+
60
+
61
+ $HYDRA_CONFIG_GROUPS
62
+
63
+
64
+ Use ''--cfg hydra'' to Show the Hydra config.
65
+
66
+ '
67
+ hydra_help: ???
68
+ hydra_logging:
69
+ version: 1
70
+ formatters:
71
+ colorlog:
72
+ (): colorlog.ColoredFormatter
73
+ format: '[%(cyan)s%(asctime)s%(reset)s][%(purple)sHYDRA%(reset)s] %(message)s'
74
+ handlers:
75
+ console:
76
+ class: logging.StreamHandler
77
+ formatter: colorlog
78
+ stream: ext://sys.stdout
79
+ root:
80
+ level: INFO
81
+ handlers:
82
+ - console
83
+ disable_existing_loggers: false
84
+ job_logging:
85
+ version: 1
86
+ formatters:
87
+ simple:
88
+ format: '[%(asctime)s][%(name)s][%(levelname)s] - %(message)s'
89
+ colorlog:
90
+ (): colorlog.ColoredFormatter
91
+ format: '[%(cyan)s%(asctime)s%(reset)s][%(blue)s%(name)s%(reset)s][%(log_color)s%(levelname)s%(reset)s]
92
+ - %(message)s'
93
+ log_colors:
94
+ DEBUG: purple
95
+ INFO: green
96
+ WARNING: yellow
97
+ ERROR: red
98
+ CRITICAL: red
99
+ handlers:
100
+ console:
101
+ class: logging.StreamHandler
102
+ formatter: colorlog
103
+ stream: ext://sys.stdout
104
+ file:
105
+ class: logging.FileHandler
106
+ formatter: simple
107
+ filename: ${hydra.job.name}.log
108
+ root:
109
+ level: INFO
110
+ handlers:
111
+ - console
112
+ - file
113
+ disable_existing_loggers: false
114
+ env: {}
115
+ mode: RUN
116
+ searchpath: []
117
+ callbacks: {}
118
+ output_subdir: .hydra
119
+ overrides:
120
+ hydra:
121
+ - hydra.run.dir=./runs/text_generation/HuggingFaceTB/SmolLM-1.7B/2024-10-24-15-16-02
122
+ - hydra.mode=RUN
123
+ task:
124
+ - backend.model=HuggingFaceTB/SmolLM-1.7B
125
+ - backend.processor=HuggingFaceTB/SmolLM-1.7B
126
+ job:
127
+ name: cli
128
+ chdir: true
129
+ override_dirname: backend.model=HuggingFaceTB/SmolLM-1.7B,backend.processor=HuggingFaceTB/SmolLM-1.7B
130
+ id: ???
131
+ num: ???
132
+ config_name: text_generation
133
+ env_set:
134
+ OVERRIDE_BENCHMARKS: '1'
135
+ env_copy: []
136
+ config:
137
+ override_dirname:
138
+ kv_sep: '='
139
+ item_sep: ','
140
+ exclude_keys: []
141
+ runtime:
142
+ version: 1.3.2
143
+ version_base: '1.3'
144
+ cwd: /
145
+ config_sources:
146
+ - path: hydra.conf
147
+ schema: pkg
148
+ provider: hydra
149
+ - path: optimum_benchmark
150
+ schema: pkg
151
+ provider: main
152
+ - path: hydra_plugins.hydra_colorlog.conf
153
+ schema: pkg
154
+ provider: hydra-colorlog
155
+ - path: /optimum-benchmark/examples/energy_star
156
+ schema: file
157
+ provider: command-line
158
+ - path: ''
159
+ schema: structured
160
+ provider: schema
161
+ output_dir: /runs/text_generation/HuggingFaceTB/SmolLM-1.7B/2024-10-24-15-16-02
162
+ choices:
163
+ benchmark: energy_star
164
+ launcher: process
165
+ backend: pytorch
166
+ hydra/env: default
167
+ hydra/callbacks: null
168
+ hydra/job_logging: colorlog
169
+ hydra/hydra_logging: colorlog
170
+ hydra/hydra_help: default
171
+ hydra/help: default
172
+ hydra/sweeper: basic
173
+ hydra/launcher: basic
174
+ hydra/output: default
175
+ verbose: false
runs/text_generation/a10g-large/HuggingFaceTB/SmolLM-1.7B/2024-10-24-15-16-02/.hydra/overrides.yaml ADDED
@@ -0,0 +1,2 @@
+ - backend.model=HuggingFaceTB/SmolLM-1.7B
+ - backend.processor=HuggingFaceTB/SmolLM-1.7B
runs/text_generation/a10g-large/HuggingFaceTB/SmolLM-1.7B/2024-10-24-15-16-02/benchmark_report.json ADDED
@@ -0,0 +1,203 @@
1
+ {
2
+ "prefill": {
3
+ "memory": null,
4
+ "latency": null,
5
+ "throughput": null,
6
+ "energy": {
7
+ "unit": "kWh",
8
+ "cpu": 0.0009210179793032473,
9
+ "ram": 9.403775706541059e-06,
10
+ "gpu": 0.006402295760721532,
11
+ "total": 0.007332717515731321
12
+ },
13
+ "efficiency": {
14
+ "unit": "tokens/kWh",
15
+ "value": 41960978.223952904
16
+ },
17
+ "measures": [
18
+ {
19
+ "unit": "kWh",
20
+ "cpu": 0.0010188414567649793,
21
+ "ram": 1.040020860331343e-05,
22
+ "gpu": 0.007069801211392779,
23
+ "total": 0.008099042876761072
24
+ },
25
+ {
26
+ "unit": "kWh",
27
+ "cpu": 0.001023928069784597,
28
+ "ram": 1.045351056190972e-05,
29
+ "gpu": 0.007115094580959713,
30
+ "total": 0.008149476161306221
31
+ },
32
+ {
33
+ "unit": "kWh",
34
+ "cpu": 0.0010232283780346432,
35
+ "ram": 1.0447744339112577e-05,
36
+ "gpu": 0.007115182636586503,
37
+ "total": 0.008148858758960258
38
+ },
39
+ {
40
+ "unit": "kWh",
41
+ "cpu": 0.0010241352881440744,
42
+ "ram": 1.0456856244100832e-05,
43
+ "gpu": 0.00711364291313199,
44
+ "total": 0.008148235057520166
45
+ },
46
+ {
47
+ "unit": "kWh",
48
+ "cpu": 0.0010237236436746672,
49
+ "ram": 1.0452806977406536e-05,
50
+ "gpu": 0.007123706532293106,
51
+ "total": 0.008157882982945182
52
+ },
53
+ {
54
+ "unit": "kWh",
55
+ "cpu": 0.0010238973002910726,
56
+ "ram": 1.0454782526115431e-05,
57
+ "gpu": 0.00711877319501486,
58
+ "total": 0.008153125277832046
59
+ },
60
+ {
61
+ "unit": "kWh",
62
+ "cpu": 0.0,
63
+ "ram": 0.0,
64
+ "gpu": 0.0,
65
+ "total": 0.0
66
+ },
67
+ {
68
+ "unit": "kWh",
69
+ "cpu": 0.001024103350314949,
70
+ "ram": 1.0456892804618941e-05,
71
+ "gpu": 0.0071262034787364925,
72
+ "total": 0.008160763721856056
73
+ },
74
+ {
75
+ "unit": "kWh",
76
+ "cpu": 0.0010241413993481215,
77
+ "ram": 1.0457277251147982e-05,
78
+ "gpu": 0.007125078477836055,
79
+ "total": 0.008159677154435316
80
+ },
81
+ {
82
+ "unit": "kWh",
83
+ "cpu": 0.001024180906675368,
84
+ "ram": 1.0457677757685137e-05,
85
+ "gpu": 0.007115474581263825,
86
+ "total": 0.008150113165696887
87
+ }
88
+ ]
89
+ },
90
+ "decode": {
91
+ "memory": null,
92
+ "latency": null,
93
+ "throughput": null,
94
+ "energy": {
95
+ "unit": "kWh",
96
+ "cpu": 0.0017958659208215804,
97
+ "ram": 1.8341901791392123e-05,
98
+ "gpu": 0.007306251844996492,
99
+ "total": 0.009120459667609458
100
+ },
101
+ "efficiency": {
102
+ "unit": "tokens/kWh",
103
+ "value": 986792.3688060087
104
+ },
105
+ "measures": [
106
+ {
107
+ "unit": "kWh",
108
+ "cpu": 0.0020010180013199305,
109
+ "ram": 2.0439056319295045e-05,
110
+ "gpu": 0.008227584915394992,
111
+ "total": 0.010249041973034218
112
+ },
113
+ {
114
+ "unit": "kWh",
115
+ "cpu": 0.0019951513697100583,
116
+ "ram": 2.0378089452524212e-05,
117
+ "gpu": 0.0081139814911797,
118
+ "total": 0.010129510950342285
119
+ },
120
+ {
121
+ "unit": "kWh",
122
+ "cpu": 0.0019969063428461812,
123
+ "ram": 2.039467949555688e-05,
124
+ "gpu": 0.008126311778821282,
125
+ "total": 0.010143612801163042
126
+ },
127
+ {
128
+ "unit": "kWh",
129
+ "cpu": 0.00199376487094794,
130
+ "ram": 2.03630047581031e-05,
131
+ "gpu": 0.008109437043099632,
132
+ "total": 0.010123564918805665
133
+ },
134
+ {
135
+ "unit": "kWh",
136
+ "cpu": -0.0010237236436746672,
137
+ "ram": -1.0452806977406536e-05,
138
+ "gpu": -0.007123706532293106,
139
+ "total": -0.008157882982945182
140
+ },
141
+ {
142
+ "unit": "kWh",
143
+ "cpu": 0.001994637724863909,
144
+ "ram": 2.037162971133379e-05,
145
+ "gpu": 0.008115028992016882,
146
+ "total": 0.010130038346592124
147
+ },
148
+ {
149
+ "unit": "kWh",
150
+ "cpu": 0.0030181286351364765,
151
+ "ram": 3.0822295893551696e-05,
152
+ "gpu": 0.015217404673913748,
153
+ "total": 0.018266355604943774
154
+ },
155
+ {
156
+ "unit": "kWh",
157
+ "cpu": 0.0019955356547212005,
158
+ "ram": 2.0380813163960854e-05,
159
+ "gpu": 0.008098154811851721,
160
+ "total": 0.010114071279736882
161
+ },
162
+ {
163
+ "unit": "kWh",
164
+ "cpu": 0.0019944307125576103,
165
+ "ram": 2.0369530424909583e-05,
166
+ "gpu": 0.008090167583239882,
167
+ "total": 0.010104967826222402
168
+ },
169
+ {
170
+ "unit": "kWh",
171
+ "cpu": 0.0019928095397871697,
172
+ "ram": 2.0352725672092604e-05,
173
+ "gpu": 0.008088153692740185,
174
+ "total": 0.010101315958199408
175
+ }
176
+ ]
177
+ },
178
+ "per_token": {
179
+ "memory": null,
180
+ "latency": null,
181
+ "throughput": null,
182
+ "energy": null,
183
+ "efficiency": null,
184
+ "measures": null
185
+ },
186
+ "preprocess": {
187
+ "memory": null,
188
+ "latency": null,
189
+ "throughput": null,
190
+ "energy": {
191
+ "unit": "kWh",
192
+ "cpu": 1.0438337355784218e-05,
193
+ "ram": 7.78486547950319e-08,
194
+ "gpu": 1.6238901880072376e-05,
195
+ "total": 2.6755087890651626e-05
196
+ },
197
+ "efficiency": {
198
+ "unit": "samples/kWh",
199
+ "value": 37376068.58504866
200
+ },
201
+ "measures": null
202
+ }
203
+ }
runs/text_generation/a10g-large/HuggingFaceTB/SmolLM-1.7B/2024-10-24-15-16-02/cli.log ADDED
@@ -0,0 +1,188 @@
1
+ [2024-10-24 15:16:05,195][launcher][INFO] - Allocating process launcher
2
+ [2024-10-24 15:16:05,195][process][INFO] - + Setting multiprocessing start method to spawn.
3
+ [2024-10-24 15:16:05,207][process][INFO] - + Launched benchmark in isolated process 180.
4
+ [PROC-0][2024-10-24 15:16:07,766][datasets][INFO] - PyTorch version 2.4.0 available.
5
+ [PROC-0][2024-10-24 15:16:08,703][backend][INFO] - Allocating pytorch backend
6
+ [PROC-0][2024-10-24 15:16:08,703][backend][INFO] - + Setting random seed to 42
7
+ [PROC-0][2024-10-24 15:16:09,749][pytorch][INFO] - + Using AutoModel class AutoModelForCausalLM
8
+ [PROC-0][2024-10-24 15:16:09,749][pytorch][INFO] - + Creating backend temporary directory
9
+ [PROC-0][2024-10-24 15:16:09,750][pytorch][INFO] - + Loading model with random weights
10
+ [PROC-0][2024-10-24 15:16:09,750][pytorch][INFO] - + Creating no weights model
11
+ [PROC-0][2024-10-24 15:16:09,750][pytorch][INFO] - + Creating no weights model directory
12
+ [PROC-0][2024-10-24 15:16:09,750][pytorch][INFO] - + Creating no weights model state dict
13
+ [PROC-0][2024-10-24 15:16:09,773][pytorch][INFO] - + Saving no weights model safetensors
14
+ [PROC-0][2024-10-24 15:16:09,773][pytorch][INFO] - + Saving no weights model pretrained config
15
+ [PROC-0][2024-10-24 15:16:09,774][pytorch][INFO] - + Loading no weights AutoModel
16
+ [PROC-0][2024-10-24 15:16:09,774][pytorch][INFO] - + Loading model directly on device: cuda
17
+ [PROC-0][2024-10-24 15:16:10,025][pytorch][INFO] - + Turning on model's eval mode
18
+ [PROC-0][2024-10-24 15:16:10,032][benchmark][INFO] - Allocating energy_star benchmark
19
+ [PROC-0][2024-10-24 15:16:10,032][energy_star][INFO] - + Loading raw dataset
20
+ [PROC-0][2024-10-24 15:16:11,499][energy_star][INFO] - + Updating Text Generation kwargs with default values
21
+ [PROC-0][2024-10-24 15:16:11,499][energy_star][INFO] - + Initializing Text Generation report
22
+ [PROC-0][2024-10-24 15:16:11,499][energy][INFO] - + Tracking GPU energy on devices [0]
23
+ [PROC-0][2024-10-24 15:16:15,698][energy_star][INFO] - + Preprocessing dataset
24
+ [PROC-0][2024-10-24 15:16:16,583][energy][INFO] - + Saving codecarbon emission data to preprocess_codecarbon.json
25
+ [PROC-0][2024-10-24 15:16:16,583][energy_star][INFO] - + Preparing backend for Inference
26
+ [PROC-0][2024-10-24 15:16:16,583][energy_star][INFO] - + Initialising dataloader
27
+ [PROC-0][2024-10-24 15:16:16,584][energy_star][INFO] - + Warming up backend for Inference
28
+ [PROC-0][2024-10-24 15:16:18,192][energy_star][INFO] - + Additional warmup for Text Generation
29
+ [PROC-0][2024-10-24 15:16:18,432][energy_star][INFO] - + Running Text Generation energy tracking for 10 iterations
30
+ [PROC-0][2024-10-24 15:16:18,432][energy_star][INFO] - + Prefill iteration 1/10
31
+ [PROC-0][2024-10-24 15:17:44,735][energy][INFO] - + Saving codecarbon emission data to prefill_codecarbon.json
32
+ [PROC-0][2024-10-24 15:17:44,735][energy_star][INFO] - + Prefill iteration 2/10
33
+ [PROC-0][2024-10-24 15:19:11,469][energy][INFO] - + Saving codecarbon emission data to prefill_codecarbon.json
34
+ [PROC-0][2024-10-24 15:19:11,469][energy_star][INFO] - + Prefill iteration 3/10
35
+ [PROC-0][2024-10-24 15:20:38,143][energy][INFO] - + Saving codecarbon emission data to prefill_codecarbon.json
36
+ [PROC-0][2024-10-24 15:20:38,143][energy_star][INFO] - + Prefill iteration 4/10
37
+ [PROC-0][2024-10-24 15:22:04,894][energy][INFO] - + Saving codecarbon emission data to prefill_codecarbon.json
38
+ [PROC-0][2024-10-24 15:22:04,895][energy_star][INFO] - + Prefill iteration 5/10
39
+ [PROC-0][2024-10-24 15:23:31,611][energy][INFO] - + Saving codecarbon emission data to prefill_codecarbon.json
40
+ [PROC-0][2024-10-24 15:23:31,611][energy_star][INFO] - + Prefill iteration 6/10
41
+ [PROC-0][2024-10-24 15:24:58,342][energy][INFO] - + Saving codecarbon emission data to prefill_codecarbon.json
42
+ [PROC-0][2024-10-24 15:24:58,342][energy_star][INFO] - + Prefill iteration 7/10
43
+ [PROC-0][2024-10-24 15:26:25,098][energy][INFO] - + Saving codecarbon emission data to prefill_codecarbon.json
44
+ [PROC-0][2024-10-24 15:26:25,099][energy_star][INFO] - + Prefill iteration 8/10
45
+ [PROC-0][2024-10-24 15:27:51,847][energy][INFO] - + Saving codecarbon emission data to prefill_codecarbon.json
46
+ [PROC-0][2024-10-24 15:27:51,847][energy_star][INFO] - + Prefill iteration 9/10
47
+ [PROC-0][2024-10-24 15:29:18,599][energy][INFO] - + Saving codecarbon emission data to prefill_codecarbon.json
48
+ [PROC-0][2024-10-24 15:29:18,599][energy_star][INFO] - + Prefill iteration 10/10
49
+ [PROC-0][2024-10-24 15:30:45,354][energy][INFO] - + Saving codecarbon emission data to prefill_codecarbon.json
50
+ [PROC-0][2024-10-24 15:30:45,354][energy_star][INFO] - + Decoding iteration 1/10
51
+ [PROC-0][2024-10-24 15:35:01,155][energy][INFO] - + Saving codecarbon emission data to generate_codecarbon.json
52
+ [PROC-0][2024-10-24 15:35:01,155][energy_star][INFO] - + Decoding iteration 2/10
53
+ [PROC-0][2024-10-24 15:39:16,890][energy][INFO] - + Saving codecarbon emission data to generate_codecarbon.json
54
+ [PROC-0][2024-10-24 15:39:16,890][energy_star][INFO] - + Decoding iteration 3/10
55
+ [PROC-0][2024-10-24 15:43:32,714][energy][INFO] - + Saving codecarbon emission data to generate_codecarbon.json
56
+ [PROC-0][2024-10-24 15:43:32,714][energy_star][INFO] - + Decoding iteration 4/10
57
+ [PROC-0][2024-10-24 15:47:48,349][energy][INFO] - + Saving codecarbon emission data to generate_codecarbon.json
58
+ [PROC-0][2024-10-24 15:47:48,349][energy_star][INFO] - + Decoding iteration 5/10
59
+ [PROC-0][2024-10-24 15:52:04,040][energy][INFO] - + Saving codecarbon emission data to generate_codecarbon.json
60
+ [PROC-0][2024-10-24 15:52:04,040][energy_star][INFO] - + Decoding iteration 6/10
61
+ [PROC-0][2024-10-24 15:56:19,729][energy][INFO] - + Saving codecarbon emission data to generate_codecarbon.json
62
+ [PROC-0][2024-10-24 15:56:19,729][energy_star][INFO] - + Decoding iteration 7/10
63
+ [PROC-0][2024-10-24 16:00:35,383][energy][INFO] - + Saving codecarbon emission data to generate_codecarbon.json
64
+ [PROC-0][2024-10-24 16:00:35,383][energy_star][INFO] - + Decoding iteration 8/10
65
+ [PROC-0][2024-10-24 16:04:51,165][energy][INFO] - + Saving codecarbon emission data to generate_codecarbon.json
66
+ [PROC-0][2024-10-24 16:04:51,165][energy_star][INFO] - + Decoding iteration 9/10
67
+ [PROC-0][2024-10-24 16:09:06,857][energy][INFO] - + Saving codecarbon emission data to generate_codecarbon.json
68
+ [PROC-0][2024-10-24 16:09:06,857][energy_star][INFO] - + Decoding iteration 10/10
69
+ [PROC-0][2024-10-24 16:13:22,415][energy][INFO] - + Saving codecarbon emission data to generate_codecarbon.json
70
+ [PROC-0][2024-10-24 16:13:22,415][energy][INFO] - + prefill energy consumption:
71
+ [PROC-0][2024-10-24 16:13:22,416][energy][INFO] - + CPU: 0.000921 (kWh)
72
+ [PROC-0][2024-10-24 16:13:22,416][energy][INFO] - + GPU: 0.006402 (kWh)
73
+ [PROC-0][2024-10-24 16:13:22,416][energy][INFO] - + RAM: 0.000009 (kWh)
74
+ [PROC-0][2024-10-24 16:13:22,416][energy][INFO] - + total: 0.007333 (kWh)
75
+ [PROC-0][2024-10-24 16:13:22,416][energy][INFO] - + prefill_iteration_1 energy consumption:
76
+ [PROC-0][2024-10-24 16:13:22,416][energy][INFO] - + CPU: 0.001019 (kWh)
77
+ [PROC-0][2024-10-24 16:13:22,416][energy][INFO] - + GPU: 0.007070 (kWh)
78
+ [PROC-0][2024-10-24 16:13:22,416][energy][INFO] - + RAM: 0.000010 (kWh)
79
+ [PROC-0][2024-10-24 16:13:22,416][energy][INFO] - + total: 0.008099 (kWh)
80
+ [PROC-0][2024-10-24 16:13:22,416][energy][INFO] - + prefill_iteration_2 energy consumption:
81
+ [PROC-0][2024-10-24 16:13:22,416][energy][INFO] - + CPU: 0.001024 (kWh)
82
+ [PROC-0][2024-10-24 16:13:22,417][energy][INFO] - + GPU: 0.007115 (kWh)
83
+ [PROC-0][2024-10-24 16:13:22,417][energy][INFO] - + RAM: 0.000010 (kWh)
84
+ [PROC-0][2024-10-24 16:13:22,417][energy][INFO] - + total: 0.008149 (kWh)
85
+ [PROC-0][2024-10-24 16:13:22,417][energy][INFO] - + prefill_iteration_3 energy consumption:
86
+ [PROC-0][2024-10-24 16:13:22,417][energy][INFO] - + CPU: 0.001023 (kWh)
87
+ [PROC-0][2024-10-24 16:13:22,417][energy][INFO] - + GPU: 0.007115 (kWh)
88
+ [PROC-0][2024-10-24 16:13:22,417][energy][INFO] - + RAM: 0.000010 (kWh)
89
+ [PROC-0][2024-10-24 16:13:22,417][energy][INFO] - + total: 0.008149 (kWh)
90
+ [PROC-0][2024-10-24 16:13:22,417][energy][INFO] - + prefill_iteration_4 energy consumption:
91
+ [PROC-0][2024-10-24 16:13:22,417][energy][INFO] - + CPU: 0.001024 (kWh)
92
+ [PROC-0][2024-10-24 16:13:22,417][energy][INFO] - + GPU: 0.007114 (kWh)
93
+ [PROC-0][2024-10-24 16:13:22,417][energy][INFO] - + RAM: 0.000010 (kWh)
94
+ [PROC-0][2024-10-24 16:13:22,418][energy][INFO] - + total: 0.008148 (kWh)
95
+ [PROC-0][2024-10-24 16:13:22,418][energy][INFO] - + prefill_iteration_5 energy consumption:
96
+ [PROC-0][2024-10-24 16:13:22,418][energy][INFO] - + CPU: 0.001024 (kWh)
97
+ [PROC-0][2024-10-24 16:13:22,418][energy][INFO] - + GPU: 0.007124 (kWh)
98
+ [PROC-0][2024-10-24 16:13:22,418][energy][INFO] - + RAM: 0.000010 (kWh)
99
+ [PROC-0][2024-10-24 16:13:22,418][energy][INFO] - + total: 0.008158 (kWh)
100
+ [PROC-0][2024-10-24 16:13:22,418][energy][INFO] - + prefill_iteration_6 energy consumption:
101
+ [PROC-0][2024-10-24 16:13:22,418][energy][INFO] - + CPU: 0.001024 (kWh)
102
+ [PROC-0][2024-10-24 16:13:22,418][energy][INFO] - + GPU: 0.007119 (kWh)
103
+ [PROC-0][2024-10-24 16:13:22,418][energy][INFO] - + RAM: 0.000010 (kWh)
104
+ [PROC-0][2024-10-24 16:13:22,418][energy][INFO] - + total: 0.008153 (kWh)
105
+ [PROC-0][2024-10-24 16:13:22,418][energy][INFO] - + prefill_iteration_7 energy consumption:
106
+ [PROC-0][2024-10-24 16:13:22,418][energy][INFO] - + CPU: 0.000000 (kWh)
107
+ [PROC-0][2024-10-24 16:13:22,419][energy][INFO] - + GPU: 0.000000 (kWh)
108
+ [PROC-0][2024-10-24 16:13:22,419][energy][INFO] - + RAM: 0.000000 (kWh)
109
+ [PROC-0][2024-10-24 16:13:22,419][energy][INFO] - + total: 0.000000 (kWh)
110
+ [PROC-0][2024-10-24 16:13:22,419][energy][INFO] - + prefill_iteration_8 energy consumption:
111
+ [PROC-0][2024-10-24 16:13:22,419][energy][INFO] - + CPU: 0.001024 (kWh)
112
+ [PROC-0][2024-10-24 16:13:22,419][energy][INFO] - + GPU: 0.007126 (kWh)
113
+ [PROC-0][2024-10-24 16:13:22,419][energy][INFO] - + RAM: 0.000010 (kWh)
114
+ [PROC-0][2024-10-24 16:13:22,419][energy][INFO] - + total: 0.008161 (kWh)
115
+ [PROC-0][2024-10-24 16:13:22,419][energy][INFO] - + prefill_iteration_9 energy consumption:
116
+ [PROC-0][2024-10-24 16:13:22,419][energy][INFO] - + CPU: 0.001024 (kWh)
117
+ [PROC-0][2024-10-24 16:13:22,419][energy][INFO] - + GPU: 0.007125 (kWh)
118
+ [PROC-0][2024-10-24 16:13:22,419][energy][INFO] - + RAM: 0.000010 (kWh)
119
+ [PROC-0][2024-10-24 16:13:22,420][energy][INFO] - + total: 0.008160 (kWh)
120
+ [PROC-0][2024-10-24 16:13:22,420][energy][INFO] - + prefill_iteration_10 energy consumption:
121
+ [PROC-0][2024-10-24 16:13:22,420][energy][INFO] - + CPU: 0.001024 (kWh)
122
+ [PROC-0][2024-10-24 16:13:22,420][energy][INFO] - + GPU: 0.007115 (kWh)
123
+ [PROC-0][2024-10-24 16:13:22,420][energy][INFO] - + RAM: 0.000010 (kWh)
124
+ [PROC-0][2024-10-24 16:13:22,420][energy][INFO] - + total: 0.008150 (kWh)
125
+ [PROC-0][2024-10-24 16:13:22,420][energy][INFO] - + decode energy consumption:
126
+ [PROC-0][2024-10-24 16:13:22,420][energy][INFO] - + CPU: 0.001796 (kWh)
127
+ [PROC-0][2024-10-24 16:13:22,420][energy][INFO] - + GPU: 0.007306 (kWh)
128
+ [PROC-0][2024-10-24 16:13:22,420][energy][INFO] - + RAM: 0.000018 (kWh)
129
+ [PROC-0][2024-10-24 16:13:22,420][energy][INFO] - + total: 0.009120 (kWh)
130
+ [PROC-0][2024-10-24 16:13:22,420][energy][INFO] - + decode_iteration_1 energy consumption:
131
+ [PROC-0][2024-10-24 16:13:22,421][energy][INFO] - + CPU: 0.002001 (kWh)
132
+ [PROC-0][2024-10-24 16:13:22,421][energy][INFO] - + GPU: 0.008228 (kWh)
133
+ [PROC-0][2024-10-24 16:13:22,421][energy][INFO] - + RAM: 0.000020 (kWh)
134
+ [PROC-0][2024-10-24 16:13:22,421][energy][INFO] - + total: 0.010249 (kWh)
135
+ [PROC-0][2024-10-24 16:13:22,421][energy][INFO] - + decode_iteration_2 energy consumption:
136
+ [PROC-0][2024-10-24 16:13:22,421][energy][INFO] - + CPU: 0.001995 (kWh)
137
+ [PROC-0][2024-10-24 16:13:22,421][energy][INFO] - + GPU: 0.008114 (kWh)
138
+ [PROC-0][2024-10-24 16:13:22,421][energy][INFO] - + RAM: 0.000020 (kWh)
139
+ [PROC-0][2024-10-24 16:13:22,421][energy][INFO] - + total: 0.010130 (kWh)
140
+ [PROC-0][2024-10-24 16:13:22,421][energy][INFO] - + decode_iteration_3 energy consumption:
141
+ [PROC-0][2024-10-24 16:13:22,421][energy][INFO] - + CPU: 0.001997 (kWh)
142
+ [PROC-0][2024-10-24 16:13:22,421][energy][INFO] - + GPU: 0.008126 (kWh)
143
+ [PROC-0][2024-10-24 16:13:22,422][energy][INFO] - + RAM: 0.000020 (kWh)
144
+ [PROC-0][2024-10-24 16:13:22,422][energy][INFO] - + total: 0.010144 (kWh)
145
+ [PROC-0][2024-10-24 16:13:22,422][energy][INFO] - + decode_iteration_4 energy consumption:
146
+ [PROC-0][2024-10-24 16:13:22,422][energy][INFO] - + CPU: 0.001994 (kWh)
147
+ [PROC-0][2024-10-24 16:13:22,422][energy][INFO] - + GPU: 0.008109 (kWh)
148
+ [PROC-0][2024-10-24 16:13:22,422][energy][INFO] - + RAM: 0.000020 (kWh)
149
+ [PROC-0][2024-10-24 16:13:22,422][energy][INFO] - + total: 0.010124 (kWh)
150
+ [PROC-0][2024-10-24 16:13:22,422][energy][INFO] - + decode_iteration_5 energy consumption:
151
+ [PROC-0][2024-10-24 16:13:22,422][energy][INFO] - + CPU: -0.001024 (kWh)
152
+ [PROC-0][2024-10-24 16:13:22,422][energy][INFO] - + GPU: -0.007124 (kWh)
153
+ [PROC-0][2024-10-24 16:13:22,422][energy][INFO] - + RAM: -0.000010 (kWh)
154
+ [PROC-0][2024-10-24 16:13:22,422][energy][INFO] - + total: -0.008158 (kWh)
155
+ [PROC-0][2024-10-24 16:13:22,423][energy][INFO] - + decode_iteration_6 energy consumption:
156
+ [PROC-0][2024-10-24 16:13:22,423][energy][INFO] - + CPU: 0.001995 (kWh)
157
+ [PROC-0][2024-10-24 16:13:22,423][energy][INFO] - + GPU: 0.008115 (kWh)
158
+ [PROC-0][2024-10-24 16:13:22,423][energy][INFO] - + RAM: 0.000020 (kWh)
159
+ [PROC-0][2024-10-24 16:13:22,423][energy][INFO] - + total: 0.010130 (kWh)
160
+ [PROC-0][2024-10-24 16:13:22,423][energy][INFO] - + decode_iteration_7 energy consumption:
161
+ [PROC-0][2024-10-24 16:13:22,423][energy][INFO] - + CPU: 0.003018 (kWh)
162
+ [PROC-0][2024-10-24 16:13:22,423][energy][INFO] - + GPU: 0.015217 (kWh)
163
+ [PROC-0][2024-10-24 16:13:22,423][energy][INFO] - + RAM: 0.000031 (kWh)
164
+ [PROC-0][2024-10-24 16:13:22,423][energy][INFO] - + total: 0.018266 (kWh)
165
+ [PROC-0][2024-10-24 16:13:22,423][energy][INFO] - + decode_iteration_8 energy consumption:
166
+ [PROC-0][2024-10-24 16:13:22,423][energy][INFO] - + CPU: 0.001996 (kWh)
167
+ [PROC-0][2024-10-24 16:13:22,424][energy][INFO] - + GPU: 0.008098 (kWh)
168
+ [PROC-0][2024-10-24 16:13:22,424][energy][INFO] - + RAM: 0.000020 (kWh)
169
+ [PROC-0][2024-10-24 16:13:22,424][energy][INFO] - + total: 0.010114 (kWh)
170
+ [PROC-0][2024-10-24 16:13:22,424][energy][INFO] - + decode_iteration_9 energy consumption:
171
+ [PROC-0][2024-10-24 16:13:22,424][energy][INFO] - + CPU: 0.001994 (kWh)
172
+ [PROC-0][2024-10-24 16:13:22,424][energy][INFO] - + GPU: 0.008090 (kWh)
173
+ [PROC-0][2024-10-24 16:13:22,424][energy][INFO] - + RAM: 0.000020 (kWh)
174
+ [PROC-0][2024-10-24 16:13:22,424][energy][INFO] - + total: 0.010105 (kWh)
175
+ [PROC-0][2024-10-24 16:13:22,424][energy][INFO] - + decode_iteration_10 energy consumption:
176
+ [PROC-0][2024-10-24 16:13:22,424][energy][INFO] - + CPU: 0.001993 (kWh)
177
+ [PROC-0][2024-10-24 16:13:22,424][energy][INFO] - + GPU: 0.008088 (kWh)
178
+ [PROC-0][2024-10-24 16:13:22,424][energy][INFO] - + RAM: 0.000020 (kWh)
179
+ [PROC-0][2024-10-24 16:13:22,425][energy][INFO] - + total: 0.010101 (kWh)
180
+ [PROC-0][2024-10-24 16:13:22,425][energy][INFO] - + preprocess energy consumption:
181
+ [PROC-0][2024-10-24 16:13:22,425][energy][INFO] - + CPU: 0.000010 (kWh)
182
+ [PROC-0][2024-10-24 16:13:22,425][energy][INFO] - + GPU: 0.000016 (kWh)
183
+ [PROC-0][2024-10-24 16:13:22,425][energy][INFO] - + RAM: 0.000000 (kWh)
184
+ [PROC-0][2024-10-24 16:13:22,425][energy][INFO] - + total: 0.000027 (kWh)
185
+ [PROC-0][2024-10-24 16:13:22,425][energy][INFO] - + prefill energy efficiency: 41960978.223953 (tokens/kWh)
186
+ [PROC-0][2024-10-24 16:13:22,425][energy][INFO] - + decode energy efficiency: 986792.368806 (tokens/kWh)
187
+ [PROC-0][2024-10-24 16:13:22,425][energy][INFO] - + preprocess energy efficiency: 37376068.585049 (samples/kWh)
188
+ [2024-10-24 16:13:23,134][datasets][INFO] - PyTorch version 2.4.0 available.
runs/text_generation/a10g-large/HuggingFaceTB/SmolLM-1.7B/2024-10-24-15-16-02/error.log ADDED
The diff for this file is too large to render. See raw diff
 
runs/text_generation/a10g-large/HuggingFaceTB/SmolLM-1.7B/2024-10-24-15-16-02/experiment_config.json ADDED
@@ -0,0 +1,110 @@
1
+ {
2
+ "experiment_name": "text_generation",
3
+ "backend": {
4
+ "name": "pytorch",
5
+ "version": "2.4.0",
6
+ "_target_": "optimum_benchmark.backends.pytorch.backend.PyTorchBackend",
7
+ "task": "text-generation",
8
+ "model": "HuggingFaceTB/SmolLM-1.7B",
9
+ "processor": "HuggingFaceTB/SmolLM-1.7B",
10
+ "library": "transformers",
11
+ "device": "cuda",
12
+ "device_ids": "0",
13
+ "seed": 42,
14
+ "inter_op_num_threads": null,
15
+ "intra_op_num_threads": null,
16
+ "hub_kwargs": {
17
+ "revision": "main",
18
+ "force_download": false,
19
+ "local_files_only": false,
20
+ "trust_remote_code": true
21
+ },
22
+ "no_weights": true,
23
+ "device_map": null,
24
+ "torch_dtype": null,
25
+ "amp_autocast": false,
26
+ "amp_dtype": null,
27
+ "eval_mode": true,
28
+ "to_bettertransformer": false,
29
+ "low_cpu_mem_usage": null,
30
+ "attn_implementation": null,
31
+ "cache_implementation": null,
32
+ "torch_compile": false,
33
+ "torch_compile_config": {},
34
+ "quantization_scheme": null,
35
+ "quantization_config": {},
36
+ "deepspeed_inference": false,
37
+ "deepspeed_inference_config": {},
38
+ "peft_type": null,
39
+ "peft_config": {}
40
+ },
41
+ "launcher": {
42
+ "name": "process",
43
+ "_target_": "optimum_benchmark.launchers.process.launcher.ProcessLauncher",
44
+ "device_isolation": false,
45
+ "device_isolation_action": "warn",
46
+ "start_method": "spawn"
47
+ },
48
+ "benchmark": {
49
+ "name": "energy_star",
50
+ "_target_": "optimum_benchmark.benchmarks.energy_star.benchmark.EnergyStarBenchmark",
51
+ "dataset_name": "EnergyStarAI/text_generation",
52
+ "dataset_config": "",
53
+ "dataset_split": "train",
54
+ "num_samples": 1000,
55
+ "input_shapes": {
56
+ "batch_size": 1
57
+ },
58
+ "text_column_name": "text",
59
+ "truncation": true,
60
+ "max_length": -1,
61
+ "dataset_prefix1": "",
62
+ "dataset_prefix2": "",
63
+ "t5_task": "",
64
+ "image_column_name": "image",
65
+ "resize": false,
66
+ "question_column_name": "question",
67
+ "context_column_name": "context",
68
+ "sentence1_column_name": "sentence1",
69
+ "sentence2_column_name": "sentence2",
70
+ "audio_column_name": "audio",
71
+ "iterations": 10,
72
+ "warmup_runs": 10,
73
+ "energy": true,
74
+ "forward_kwargs": {},
75
+ "generate_kwargs": {
76
+ "max_new_tokens": 10,
77
+ "min_new_tokens": 10
78
+ },
79
+ "call_kwargs": {}
80
+ },
81
+ "environment": {
82
+ "cpu": " AMD EPYC 7R32",
83
+ "cpu_count": 48,
84
+ "cpu_ram_mb": 200472.73984,
85
+ "system": "Linux",
86
+ "machine": "x86_64",
87
+ "platform": "Linux-5.10.192-183.736.amzn2.x86_64-x86_64-with-glibc2.35",
88
+ "processor": "x86_64",
89
+ "python_version": "3.9.20",
90
+ "gpu": [
91
+ "NVIDIA A10G"
92
+ ],
93
+ "gpu_count": 1,
94
+ "gpu_vram_mb": 24146608128,
95
+ "optimum_benchmark_version": "0.2.0",
96
+ "optimum_benchmark_commit": null,
97
+ "transformers_version": "4.44.0",
98
+ "transformers_commit": null,
99
+ "accelerate_version": "0.33.0",
100
+ "accelerate_commit": null,
101
+ "diffusers_version": "0.30.0",
102
+ "diffusers_commit": null,
103
+ "optimum_version": null,
104
+ "optimum_commit": null,
105
+ "timm_version": null,
106
+ "timm_commit": null,
107
+ "peft_version": null,
108
+ "peft_commit": null
109
+ }
110
+ }
runs/text_generation/a10g-large/HuggingFaceTB/SmolLM-1.7B/2024-10-24-15-16-02/generate_codecarbon.json ADDED
@@ -0,0 +1,33 @@
1
+ {
2
+ "timestamp": "2024-10-24T16:13:22",
3
+ "project_name": "codecarbon",
4
+ "run_id": "09de8d70-881d-4fdb-8c44-21203f36a2d4",
5
+ "duration": -1729257239.6805792,
6
+ "emissions": 0.006737229935266403,
7
+ "emissions_rate": 2.6363137366527802e-05,
8
+ "cpu_power": 42.5,
9
+ "gpu_power": 214.17631320777406,
10
+ "ram_power": 0.43403291702270513,
11
+ "cpu_energy": 0.0030169904464625377,
12
+ "gpu_energy": 0.01520362827400401,
13
+ "ram_energy": 3.081040342977774e-05,
14
+ "energy_consumed": 0.018251429123896296,
15
+ "country_name": "United States",
16
+ "country_iso_code": "USA",
17
+ "region": "virginia",
18
+ "cloud_provider": "",
19
+ "cloud_region": "",
20
+ "os": "Linux-5.10.192-183.736.amzn2.x86_64-x86_64-with-glibc2.35",
21
+ "python_version": "3.9.20",
22
+ "codecarbon_version": "2.5.1",
23
+ "cpu_count": 48,
24
+ "cpu_model": "AMD EPYC 7R32",
25
+ "gpu_count": 1,
26
+ "gpu_model": "1 x NVIDIA A10G",
27
+ "longitude": -77.4903,
28
+ "latitude": 39.0469,
29
+ "ram_total_size": 186.7047882080078,
30
+ "tracking_mode": "process",
31
+ "on_cloud": "N",
32
+ "pue": 1.0
33
+ }
runs/text_generation/a10g-large/HuggingFaceTB/SmolLM-1.7B/2024-10-24-15-16-02/prefill_codecarbon.json ADDED
@@ -0,0 +1,33 @@
1
+ {
2
+ "timestamp": "2024-10-24T15:30:45",
3
+ "project_name": "codecarbon",
4
+ "run_id": "09de8d70-881d-4fdb-8c44-21203f36a2d4",
5
+ "duration": -1729257408.4832995,
6
+ "emissions": 0.0030084869531586504,
7
+ "emissions_rate": 3.46790702566556e-05,
8
+ "cpu_power": 42.5,
9
+ "gpu_power": 295.2757932025827,
10
+ "ram_power": 0.4339728355407715,
11
+ "cpu_energy": 0.001024180906675368,
12
+ "gpu_energy": 0.007115474581263825,
13
+ "ram_energy": 1.0457677757685137e-05,
14
+ "energy_consumed": 0.008150113165696887,
15
+ "country_name": "United States",
16
+ "country_iso_code": "USA",
17
+ "region": "virginia",
18
+ "cloud_provider": "",
19
+ "cloud_region": "",
20
+ "os": "Linux-5.10.192-183.736.amzn2.x86_64-x86_64-with-glibc2.35",
21
+ "python_version": "3.9.20",
22
+ "codecarbon_version": "2.5.1",
23
+ "cpu_count": 48,
24
+ "cpu_model": "AMD EPYC 7R32",
25
+ "gpu_count": 1,
26
+ "gpu_model": "1 x NVIDIA A10G",
27
+ "longitude": -77.4903,
28
+ "latitude": 39.0469,
29
+ "ram_total_size": 186.7047882080078,
30
+ "tracking_mode": "process",
31
+ "on_cloud": "N",
32
+ "pue": 1.0
33
+ }
runs/text_generation/a10g-large/HuggingFaceTB/SmolLM-1.7B/2024-10-24-15-16-02/preprocess_codecarbon.json ADDED
@@ -0,0 +1,33 @@
1
+ {
2
+ "timestamp": "2024-10-24T15:16:16",
3
+ "project_name": "codecarbon",
4
+ "run_id": "09de8d70-881d-4fdb-8c44-21203f36a2d4",
5
+ "duration": -1729257494.353458,
6
+ "emissions": 9.87622272392776e-06,
7
+ "emissions_rate": 1.1195621893978048e-05,
8
+ "cpu_power": 42.5,
9
+ "gpu_power": 66.29090500161253,
10
+ "ram_power": 0.3180885314941406,
11
+ "cpu_energy": 1.0438337355784218e-05,
12
+ "gpu_energy": 1.6238901880072376e-05,
13
+ "ram_energy": 7.78486547950319e-08,
14
+ "energy_consumed": 2.6755087890651626e-05,
15
+ "country_name": "United States",
16
+ "country_iso_code": "USA",
17
+ "region": "virginia",
18
+ "cloud_provider": "",
19
+ "cloud_region": "",
20
+ "os": "Linux-5.10.192-183.736.amzn2.x86_64-x86_64-with-glibc2.35",
21
+ "python_version": "3.9.20",
22
+ "codecarbon_version": "2.5.1",
23
+ "cpu_count": 48,
24
+ "cpu_model": "AMD EPYC 7R32",
25
+ "gpu_count": 1,
26
+ "gpu_model": "1 x NVIDIA A10G",
27
+ "longitude": -77.4903,
28
+ "latitude": 39.0469,
29
+ "ram_total_size": 186.7047882080078,
30
+ "tracking_mode": "process",
31
+ "on_cloud": "N",
32
+ "pue": 1.0
33
+ }
runs/text_generation/a10g-large/HuggingFaceTB/SmolLM-135M/2024-10-23-19-09-15/.hydra/config.yaml ADDED
@@ -0,0 +1,96 @@
1
+ backend:
2
+ name: pytorch
3
+ version: 2.4.0
4
+ _target_: optimum_benchmark.backends.pytorch.backend.PyTorchBackend
5
+ task: text-generation
6
+ model: HuggingFaceTB/SmolLM-135M
7
+ processor: HuggingFaceTB/SmolLM-135M
8
+ library: null
9
+ device: cuda
10
+ device_ids: '0'
11
+ seed: 42
12
+ inter_op_num_threads: null
13
+ intra_op_num_threads: null
14
+ hub_kwargs: {}
15
+ no_weights: true
16
+ device_map: null
17
+ torch_dtype: null
18
+ amp_autocast: false
19
+ amp_dtype: null
20
+ eval_mode: true
21
+ to_bettertransformer: false
22
+ low_cpu_mem_usage: null
23
+ attn_implementation: null
24
+ cache_implementation: null
25
+ torch_compile: false
26
+ torch_compile_config: {}
27
+ quantization_scheme: null
28
+ quantization_config: {}
29
+ deepspeed_inference: false
30
+ deepspeed_inference_config: {}
31
+ peft_type: null
32
+ peft_config: {}
33
+ launcher:
34
+ name: process
35
+ _target_: optimum_benchmark.launchers.process.launcher.ProcessLauncher
36
+ device_isolation: false
37
+ device_isolation_action: warn
38
+ start_method: spawn
39
+ benchmark:
40
+ name: energy_star
41
+ _target_: optimum_benchmark.benchmarks.energy_star.benchmark.EnergyStarBenchmark
42
+ dataset_name: EnergyStarAI/text_generation
43
+ dataset_config: ''
44
+ dataset_split: train
45
+ num_samples: 1000
46
+ input_shapes:
47
+ batch_size: 1
48
+ text_column_name: text
49
+ truncation: true
50
+ max_length: -1
51
+ dataset_prefix1: ''
52
+ dataset_prefix2: ''
53
+ t5_task: ''
54
+ image_column_name: image
55
+ resize: false
56
+ question_column_name: question
57
+ context_column_name: context
58
+ sentence1_column_name: sentence1
59
+ sentence2_column_name: sentence2
60
+ audio_column_name: audio
61
+ iterations: 10
62
+ warmup_runs: 10
63
+ energy: true
64
+ forward_kwargs: {}
65
+ generate_kwargs:
66
+ max_new_tokens: 10
67
+ min_new_tokens: 10
68
+ call_kwargs: {}
69
+ experiment_name: text_generation
70
+ environment:
71
+ cpu: ' AMD EPYC 7R32'
72
+ cpu_count: 48
73
+ cpu_ram_mb: 200472.73984
74
+ system: Linux
75
+ machine: x86_64
76
+ platform: Linux-5.10.192-183.736.amzn2.x86_64-x86_64-with-glibc2.35
77
+ processor: x86_64
78
+ python_version: 3.9.20
79
+ gpu:
80
+ - NVIDIA A10G
81
+ gpu_count: 1
82
+ gpu_vram_mb: 24146608128
83
+ optimum_benchmark_version: 0.2.0
84
+ optimum_benchmark_commit: null
85
+ transformers_version: 4.44.0
86
+ transformers_commit: null
87
+ accelerate_version: 0.33.0
88
+ accelerate_commit: null
89
+ diffusers_version: 0.30.0
90
+ diffusers_commit: null
91
+ optimum_version: null
92
+ optimum_commit: null
93
+ timm_version: null
94
+ timm_commit: null
95
+ peft_version: null
96
+ peft_commit: null
runs/text_generation/a10g-large/HuggingFaceTB/SmolLM-135M/2024-10-23-19-09-15/.hydra/hydra.yaml ADDED
@@ -0,0 +1,175 @@
1
+ hydra:
2
+ run:
3
+ dir: ./runs/text_generation/HuggingFaceTB/SmolLM-135M/2024-10-23-19-09-15
4
+ sweep:
5
+ dir: sweeps/${experiment_name}/${backend.model}/${now:%Y-%m-%d-%H-%M-%S}
6
+ subdir: ${hydra.job.num}
7
+ launcher:
8
+ _target_: hydra._internal.core_plugins.basic_launcher.BasicLauncher
9
+ sweeper:
10
+ _target_: hydra._internal.core_plugins.basic_sweeper.BasicSweeper
11
+ max_batch_size: null
12
+ params: null
13
+ help:
14
+ app_name: ${hydra.job.name}
15
+ header: '${hydra.help.app_name} is powered by Hydra.
16
+
17
+ '
18
+ footer: 'Powered by Hydra (https://hydra.cc)
19
+
20
+ Use --hydra-help to view Hydra specific help
21
+
22
+ '
23
+ template: '${hydra.help.header}
24
+
25
+ == Configuration groups ==
26
+
27
+ Compose your configuration from those groups (group=option)
28
+
29
+
30
+ $APP_CONFIG_GROUPS
31
+
32
+
33
+ == Config ==
34
+
35
+ Override anything in the config (foo.bar=value)
36
+
37
+
38
+ $CONFIG
39
+
40
+
41
+ ${hydra.help.footer}
42
+
43
+ '
44
+ hydra_help:
45
+ template: 'Hydra (${hydra.runtime.version})
46
+
47
+ See https://hydra.cc for more info.
48
+
49
+
50
+ == Flags ==
51
+
52
+ $FLAGS_HELP
53
+
54
+
55
+ == Configuration groups ==
56
+
57
+ Compose your configuration from those groups (For example, append hydra/job_logging=disabled
58
+ to command line)
59
+
60
+
61
+ $HYDRA_CONFIG_GROUPS
62
+
63
+
64
+ Use ''--cfg hydra'' to Show the Hydra config.
65
+
66
+ '
67
+ hydra_help: ???
68
+ hydra_logging:
69
+ version: 1
70
+ formatters:
71
+ colorlog:
72
+ (): colorlog.ColoredFormatter
73
+ format: '[%(cyan)s%(asctime)s%(reset)s][%(purple)sHYDRA%(reset)s] %(message)s'
74
+ handlers:
75
+ console:
76
+ class: logging.StreamHandler
77
+ formatter: colorlog
78
+ stream: ext://sys.stdout
79
+ root:
80
+ level: INFO
81
+ handlers:
82
+ - console
83
+ disable_existing_loggers: false
84
+ job_logging:
85
+ version: 1
86
+ formatters:
87
+ simple:
88
+ format: '[%(asctime)s][%(name)s][%(levelname)s] - %(message)s'
89
+ colorlog:
90
+ (): colorlog.ColoredFormatter
91
+ format: '[%(cyan)s%(asctime)s%(reset)s][%(blue)s%(name)s%(reset)s][%(log_color)s%(levelname)s%(reset)s]
92
+ - %(message)s'
93
+ log_colors:
94
+ DEBUG: purple
95
+ INFO: green
96
+ WARNING: yellow
97
+ ERROR: red
98
+ CRITICAL: red
99
+ handlers:
100
+ console:
101
+ class: logging.StreamHandler
102
+ formatter: colorlog
103
+ stream: ext://sys.stdout
104
+ file:
105
+ class: logging.FileHandler
106
+ formatter: simple
107
+ filename: ${hydra.job.name}.log
108
+ root:
109
+ level: INFO
110
+ handlers:
111
+ - console
112
+ - file
113
+ disable_existing_loggers: false
114
+ env: {}
115
+ mode: RUN
116
+ searchpath: []
117
+ callbacks: {}
118
+ output_subdir: .hydra
119
+ overrides:
120
+ hydra:
121
+ - hydra.run.dir=./runs/text_generation/HuggingFaceTB/SmolLM-135M/2024-10-23-19-09-15
122
+ - hydra.mode=RUN
123
+ task:
124
+ - backend.model=HuggingFaceTB/SmolLM-135M
125
+ - backend.processor=HuggingFaceTB/SmolLM-135M
126
+ job:
127
+ name: cli
128
+ chdir: true
129
+ override_dirname: backend.model=HuggingFaceTB/SmolLM-135M,backend.processor=HuggingFaceTB/SmolLM-135M
130
+ id: ???
131
+ num: ???
132
+ config_name: text_generation
133
+ env_set:
134
+ OVERRIDE_BENCHMARKS: '1'
135
+ env_copy: []
136
+ config:
137
+ override_dirname:
138
+ kv_sep: '='
139
+ item_sep: ','
140
+ exclude_keys: []
141
+ runtime:
142
+ version: 1.3.2
143
+ version_base: '1.3'
144
+ cwd: /
145
+ config_sources:
146
+ - path: hydra.conf
147
+ schema: pkg
148
+ provider: hydra
149
+ - path: optimum_benchmark
150
+ schema: pkg
151
+ provider: main
152
+ - path: hydra_plugins.hydra_colorlog.conf
153
+ schema: pkg
154
+ provider: hydra-colorlog
155
+ - path: /optimum-benchmark/examples/energy_star
156
+ schema: file
157
+ provider: command-line
158
+ - path: ''
159
+ schema: structured
160
+ provider: schema
161
+ output_dir: /runs/text_generation/HuggingFaceTB/SmolLM-135M/2024-10-23-19-09-15
162
+ choices:
163
+ benchmark: energy_star
164
+ launcher: process
165
+ backend: pytorch
166
+ hydra/env: default
167
+ hydra/callbacks: null
168
+ hydra/job_logging: colorlog
169
+ hydra/hydra_logging: colorlog
170
+ hydra/hydra_help: default
171
+ hydra/help: default
172
+ hydra/sweeper: basic
173
+ hydra/launcher: basic
174
+ hydra/output: default
175
+ verbose: false
runs/text_generation/a10g-large/HuggingFaceTB/SmolLM-135M/2024-10-23-19-09-15/.hydra/overrides.yaml ADDED
@@ -0,0 +1,2 @@
1
+ - backend.model=HuggingFaceTB/SmolLM-135M
2
+ - backend.processor=HuggingFaceTB/SmolLM-135M
runs/text_generation/a10g-large/HuggingFaceTB/SmolLM-135M/2024-10-23-19-09-15/benchmark_report.json ADDED
@@ -0,0 +1,203 @@
1
+ {
2
+ "prefill": {
3
+ "memory": null,
4
+ "latency": null,
5
+ "throughput": null,
6
+ "energy": {
7
+ "unit": "kWh",
8
+ "cpu": 0.00028216661832921095,
9
+ "ram": 2.878352463094162e-06,
10
+ "gpu": 0.0011204146741087939,
11
+ "total": 0.001405459644901099
12
+ },
13
+ "efficiency": {
14
+ "unit": "tokens/kWh",
15
+ "value": 218923397.1364946
16
+ },
17
+ "measures": [
18
+ {
19
+ "unit": "kWh",
20
+ "cpu": 0.00031336019538074554,
21
+ "ram": 3.1952333195117534e-06,
22
+ "gpu": 0.00122852237170612,
23
+ "total": 0.0015450778004063774
24
+ },
25
+ {
26
+ "unit": "kWh",
27
+ "cpu": 0.00031393739547250644,
28
+ "ram": 3.202335114385519e-06,
29
+ "gpu": 0.0012359929332390607,
30
+ "total": 0.001553132663825953
31
+ },
32
+ {
33
+ "unit": "kWh",
34
+ "cpu": 0.0003159219112732292,
35
+ "ram": 3.222888771863899e-06,
36
+ "gpu": 0.0012393301581283822,
37
+ "total": 0.0015584749581734753
38
+ },
39
+ {
40
+ "unit": "kWh",
41
+ "cpu": 0.0003150893582905055,
42
+ "ram": 3.2143926946747804e-06,
43
+ "gpu": 0.0012481104429333811,
44
+ "total": 0.0015664141939185612
45
+ },
46
+ {
47
+ "unit": "kWh",
48
+ "cpu": 0.0003144043295370163,
49
+ "ram": 3.207396461775205e-06,
50
+ "gpu": 0.001258061839781277,
51
+ "total": 0.0015756735657800683
52
+ },
53
+ {
54
+ "unit": "kWh",
55
+ "cpu": 0.0003145929484800339,
56
+ "ram": 3.2093337072112605e-06,
57
+ "gpu": 0.0012565132274318103,
58
+ "total": 0.001574315509619055
59
+ },
60
+ {
61
+ "unit": "kWh",
62
+ "cpu": 0.0,
63
+ "ram": 0.0,
64
+ "gpu": 0.0,
65
+ "total": 0.0
66
+ },
67
+ {
68
+ "unit": "kWh",
69
+ "cpu": 0.00031270235966302526,
70
+ "ram": 3.190109472312273e-06,
71
+ "gpu": 0.0012584437845308827,
72
+ "total": 0.001574336253666219
73
+ },
74
+ {
75
+ "unit": "kWh",
76
+ "cpu": 0.0003087726142439349,
77
+ "ram": 3.149867500644629e-06,
78
+ "gpu": 0.0012466090528420182,
79
+ "total": 0.001558531534586599
80
+ },
81
+ {
82
+ "unit": "kWh",
83
+ "cpu": 0.00031288507095111233,
84
+ "ram": 3.1919675885623027e-06,
85
+ "gpu": 0.001232562930495007,
86
+ "total": 0.0015486399690346833
87
+ }
88
+ ]
89
+ },
90
+ "decode": {
91
+ "memory": null,
92
+ "latency": null,
93
+ "throughput": null,
94
+ "energy": {
95
+ "unit": "kWh",
96
+ "cpu": 0.0020209151121617142,
97
+ "ram": 2.0623012510417966e-05,
98
+ "gpu": 0.004044847485875458,
99
+ "total": 0.00608638561054759
100
+ },
101
+ "efficiency": {
102
+ "unit": "tokens/kWh",
103
+ "value": 1478710.1205686298
104
+ },
105
+ "measures": [
106
+ {
107
+ "unit": "kWh",
108
+ "cpu": 0.002228578082469823,
109
+ "ram": 2.2743844645549306e-05,
110
+ "gpu": 0.004435512992850832,
111
+ "total": 0.006686834919966205
112
+ },
113
+ {
114
+ "unit": "kWh",
115
+ "cpu": 0.0022643340706578674,
116
+ "ram": 2.3107646810448203e-05,
117
+ "gpu": 0.004506180549384453,
118
+ "total": 0.00679362226685277
119
+ },
120
+ {
121
+ "unit": "kWh",
122
+ "cpu": 0.002240564864718504,
123
+ "ram": 2.2864817856105112e-05,
124
+ "gpu": 0.004480500528844189,
125
+ "total": 0.006743930211418801
126
+ },
127
+ {
128
+ "unit": "kWh",
129
+ "cpu": 0.002231354974089644,
130
+ "ram": 2.277091024922105e-05,
131
+ "gpu": 0.0044298010438375,
132
+ "total": 0.0066839269281763645
133
+ },
134
+ {
135
+ "unit": "kWh",
136
+ "cpu": -0.0003144043295370163,
137
+ "ram": -3.207396461775205e-06,
138
+ "gpu": -0.001258061839781277,
139
+ "total": -0.0015756735657800683
140
+ },
141
+ {
142
+ "unit": "kWh",
143
+ "cpu": 0.0022437327763739938,
144
+ "ram": 2.2895809829293018e-05,
145
+ "gpu": 0.004527934455678562,
146
+ "total": 0.006794563041881842
147
+ },
148
+ {
149
+ "unit": "kWh",
150
+ "cpu": 0.0025504533577234215,
151
+ "ram": 2.6025299423921498e-05,
152
+ "gpu": 0.00576570933478493,
153
+ "total": 0.008342187991932273
154
+ },
155
+ {
156
+ "unit": "kWh",
157
+ "cpu": 0.0022642733620749727,
158
+ "ram": 2.3105812715842442e-05,
159
+ "gpu": 0.004556943645553702,
160
+ "total": 0.006844322820344518
161
+ },
162
+ {
163
+ "unit": "kWh",
164
+ "cpu": 0.0022470556892481295,
165
+ "ram": 2.2930371845914078e-05,
166
+ "gpu": 0.004533760849227519,
167
+ "total": 0.006803746910321554
168
+ },
169
+ {
170
+ "unit": "kWh",
171
+ "cpu": 0.0022532082737978065,
172
+ "ram": 2.299300818966014e-05,
173
+ "gpu": 0.00447019329837417,
174
+ "total": 0.00674639458036163
175
+ }
176
+ ]
177
+ },
178
+ "per_token": {
179
+ "memory": null,
180
+ "latency": null,
181
+ "throughput": null,
182
+ "energy": null,
183
+ "efficiency": null,
184
+ "measures": null
185
+ },
186
+ "preprocess": {
187
+ "memory": null,
188
+ "latency": null,
189
+ "throughput": null,
190
+ "energy": {
191
+ "unit": "kWh",
192
+ "cpu": 1.0718486329682895e-05,
193
+ "ram": 7.992339706452299e-08,
194
+ "gpu": 1.7620569652265772e-05,
195
+ "total": 2.841897937901319e-05
196
+ },
197
+ "efficiency": {
198
+ "unit": "samples/kWh",
199
+ "value": 35187752.05342098
200
+ },
201
+ "measures": null
202
+ }
203
+ }
runs/text_generation/a10g-large/HuggingFaceTB/SmolLM-135M/2024-10-23-19-09-15/cli.log ADDED
@@ -0,0 +1,188 @@
1
+ [2024-10-23 19:09:18,036][launcher][INFO] - Allocating process launcher
2
+ [2024-10-23 19:09:18,036][process][INFO] - + Setting multiprocessing start method to spawn.
3
+ [2024-10-23 19:09:18,048][process][INFO] - + Launched benchmark in isolated process 200.
4
+ [PROC-0][2024-10-23 19:09:20,588][datasets][INFO] - PyTorch version 2.4.0 available.
5
+ [PROC-0][2024-10-23 19:09:21,586][backend][INFO] - Allocating pytorch backend
6
+ [PROC-0][2024-10-23 19:09:21,586][backend][INFO] - + Setting random seed to 42
7
+ [PROC-0][2024-10-23 19:09:22,339][pytorch][INFO] - + Using AutoModel class AutoModelForCausalLM
8
+ [PROC-0][2024-10-23 19:09:22,339][pytorch][INFO] - + Creating backend temporary directory
9
+ [PROC-0][2024-10-23 19:09:22,339][pytorch][INFO] - + Loading model with random weights
10
+ [PROC-0][2024-10-23 19:09:22,339][pytorch][INFO] - + Creating no weights model
11
+ [PROC-0][2024-10-23 19:09:22,339][pytorch][INFO] - + Creating no weights model directory
12
+ [PROC-0][2024-10-23 19:09:22,339][pytorch][INFO] - + Creating no weights model state dict
13
+ [PROC-0][2024-10-23 19:09:22,362][pytorch][INFO] - + Saving no weights model safetensors
14
+ [PROC-0][2024-10-23 19:09:22,362][pytorch][INFO] - + Saving no weights model pretrained config
15
+ [PROC-0][2024-10-23 19:09:22,363][pytorch][INFO] - + Loading no weights AutoModel
16
+ [PROC-0][2024-10-23 19:09:22,363][pytorch][INFO] - + Loading model directly on device: cuda
17
+ [PROC-0][2024-10-23 19:09:22,632][pytorch][INFO] - + Turning on model's eval mode
18
+ [PROC-0][2024-10-23 19:09:22,639][benchmark][INFO] - Allocating energy_star benchmark
19
+ [PROC-0][2024-10-23 19:09:22,639][energy_star][INFO] - + Loading raw dataset
20
+ [PROC-0][2024-10-23 19:09:23,925][energy_star][INFO] - + Updating Text Generation kwargs with default values
21
+ [PROC-0][2024-10-23 19:09:23,925][energy_star][INFO] - + Initializing Text Generation report
22
+ [PROC-0][2024-10-23 19:09:23,925][energy][INFO] - + Tracking GPU energy on devices [0]
23
+ [PROC-0][2024-10-23 19:09:28,124][energy_star][INFO] - + Preprocessing dataset
24
+ [PROC-0][2024-10-23 19:09:29,033][energy][INFO] - + Saving codecarbon emission data to preprocess_codecarbon.json
25
+ [PROC-0][2024-10-23 19:09:29,034][energy_star][INFO] - + Preparing backend for Inference
26
+ [PROC-0][2024-10-23 19:09:29,034][energy_star][INFO] - + Initialising dataloader
27
+ [PROC-0][2024-10-23 19:09:29,034][energy_star][INFO] - + Warming up backend for Inference
28
+ [PROC-0][2024-10-23 19:09:30,243][energy_star][INFO] - + Additional warmup for Text Generation
29
+ [PROC-0][2024-10-23 19:09:30,452][energy_star][INFO] - + Running Text Generation energy tracking for 10 iterations
30
+ [PROC-0][2024-10-23 19:09:30,452][energy_star][INFO] - + Prefill iteration 1/10
31
+ [PROC-0][2024-10-23 19:09:56,997][energy][INFO] - + Saving codecarbon emission data to prefill_codecarbon.json
32
+ [PROC-0][2024-10-23 19:09:56,997][energy_star][INFO] - + Prefill iteration 2/10
33
+ [PROC-0][2024-10-23 19:10:23,590][energy][INFO] - + Saving codecarbon emission data to prefill_codecarbon.json
34
+ [PROC-0][2024-10-23 19:10:23,590][energy_star][INFO] - + Prefill iteration 3/10
35
+ [PROC-0][2024-10-23 19:10:50,351][energy][INFO] - + Saving codecarbon emission data to prefill_codecarbon.json
36
+ [PROC-0][2024-10-23 19:10:50,352][energy_star][INFO] - + Prefill iteration 4/10
37
+ [PROC-0][2024-10-23 19:11:17,042][energy][INFO] - + Saving codecarbon emission data to prefill_codecarbon.json
38
+ [PROC-0][2024-10-23 19:11:17,043][energy_star][INFO] - + Prefill iteration 5/10
39
+ [PROC-0][2024-10-23 19:11:43,675][energy][INFO] - + Saving codecarbon emission data to prefill_codecarbon.json
40
+ [PROC-0][2024-10-23 19:11:43,676][energy_star][INFO] - + Prefill iteration 6/10
41
+ [PROC-0][2024-10-23 19:12:10,324][energy][INFO] - + Saving codecarbon emission data to prefill_codecarbon.json
42
+ [PROC-0][2024-10-23 19:12:10,324][energy_star][INFO] - + Prefill iteration 7/10
43
+ [PROC-0][2024-10-23 19:12:36,878][energy][INFO] - + Saving codecarbon emission data to prefill_codecarbon.json
44
+ [PROC-0][2024-10-23 19:12:36,878][energy_star][INFO] - + Prefill iteration 8/10
45
+ [PROC-0][2024-10-23 19:13:03,366][energy][INFO] - + Saving codecarbon emission data to prefill_codecarbon.json
46
+ [PROC-0][2024-10-23 19:13:03,367][energy_star][INFO] - + Prefill iteration 9/10
47
+ [PROC-0][2024-10-23 19:13:29,522][energy][INFO] - + Saving codecarbon emission data to prefill_codecarbon.json
48
+ [PROC-0][2024-10-23 19:13:29,523][energy_star][INFO] - + Prefill iteration 10/10
49
+ [PROC-0][2024-10-23 19:13:56,027][energy][INFO] - + Saving codecarbon emission data to prefill_codecarbon.json
50
+ [PROC-0][2024-10-23 19:13:56,027][energy_star][INFO] - + Decoding iteration 1/10
51
+ [PROC-0][2024-10-23 19:17:31,345][energy][INFO] - + Saving codecarbon emission data to generate_codecarbon.json
52
+ [PROC-0][2024-10-23 19:17:31,345][energy_star][INFO] - + Decoding iteration 2/10
53
+ [PROC-0][2024-10-23 19:21:09,741][energy][INFO] - + Saving codecarbon emission data to generate_codecarbon.json
54
+ [PROC-0][2024-10-23 19:21:09,741][energy_star][INFO] - + Decoding iteration 3/10
55
+ [PROC-0][2024-10-23 19:24:46,291][energy][INFO] - + Saving codecarbon emission data to generate_codecarbon.json
56
+ [PROC-0][2024-10-23 19:24:46,292][energy_star][INFO] - + Decoding iteration 4/10
57
+ [PROC-0][2024-10-23 19:28:21,991][energy][INFO] - + Saving codecarbon emission data to generate_codecarbon.json
58
+ [PROC-0][2024-10-23 19:28:21,991][energy_star][INFO] - + Decoding iteration 5/10
59
+ [PROC-0][2024-10-23 19:31:58,797][energy][INFO] - + Saving codecarbon emission data to generate_codecarbon.json
60
+ [PROC-0][2024-10-23 19:31:58,797][energy_star][INFO] - + Decoding iteration 6/10
61
+ [PROC-0][2024-10-23 19:35:35,503][energy][INFO] - + Saving codecarbon emission data to generate_codecarbon.json
62
+ [PROC-0][2024-10-23 19:35:35,503][energy_star][INFO] - + Decoding iteration 7/10
63
+ [PROC-0][2024-10-23 19:39:11,542][energy][INFO] - + Saving codecarbon emission data to generate_codecarbon.json
64
+ [PROC-0][2024-10-23 19:39:11,543][energy_star][INFO] - + Decoding iteration 8/10
65
+ [PROC-0][2024-10-23 19:42:49,828][energy][INFO] - + Saving codecarbon emission data to generate_codecarbon.json
66
+ [PROC-0][2024-10-23 19:42:49,829][energy_star][INFO] - + Decoding iteration 9/10
67
+ [PROC-0][2024-10-23 19:46:26,323][energy][INFO] - + Saving codecarbon emission data to generate_codecarbon.json
68
+ [PROC-0][2024-10-23 19:46:26,323][energy_star][INFO] - + Decoding iteration 10/10
69
+ [PROC-0][2024-10-23 19:50:03,687][energy][INFO] - + Saving codecarbon emission data to generate_codecarbon.json
70
+ [PROC-0][2024-10-23 19:50:03,688][energy][INFO] - + prefill energy consumption:
71
+ [PROC-0][2024-10-23 19:50:03,688][energy][INFO] - + CPU: 0.000282 (kWh)
72
+ [PROC-0][2024-10-23 19:50:03,688][energy][INFO] - + GPU: 0.001120 (kWh)
73
+ [PROC-0][2024-10-23 19:50:03,688][energy][INFO] - + RAM: 0.000003 (kWh)
74
+ [PROC-0][2024-10-23 19:50:03,688][energy][INFO] - + total: 0.001405 (kWh)
75
+ [PROC-0][2024-10-23 19:50:03,688][energy][INFO] - + prefill_iteration_1 energy consumption:
76
+ [PROC-0][2024-10-23 19:50:03,689][energy][INFO] - + CPU: 0.000313 (kWh)
77
+ [PROC-0][2024-10-23 19:50:03,689][energy][INFO] - + GPU: 0.001229 (kWh)
78
+ [PROC-0][2024-10-23 19:50:03,689][energy][INFO] - + RAM: 0.000003 (kWh)
79
+ [PROC-0][2024-10-23 19:50:03,689][energy][INFO] - + total: 0.001545 (kWh)
80
+ [PROC-0][2024-10-23 19:50:03,689][energy][INFO] - + prefill_iteration_2 energy consumption:
81
+ [PROC-0][2024-10-23 19:50:03,689][energy][INFO] - + CPU: 0.000314 (kWh)
82
+ [PROC-0][2024-10-23 19:50:03,689][energy][INFO] - + GPU: 0.001236 (kWh)
83
+ [PROC-0][2024-10-23 19:50:03,689][energy][INFO] - + RAM: 0.000003 (kWh)
84
+ [PROC-0][2024-10-23 19:50:03,689][energy][INFO] - + total: 0.001553 (kWh)
85
+ [PROC-0][2024-10-23 19:50:03,689][energy][INFO] - + prefill_iteration_3 energy consumption:
86
+ [PROC-0][2024-10-23 19:50:03,689][energy][INFO] - + CPU: 0.000316 (kWh)
87
+ [PROC-0][2024-10-23 19:50:03,689][energy][INFO] - + GPU: 0.001239 (kWh)
88
+ [PROC-0][2024-10-23 19:50:03,690][energy][INFO] - + RAM: 0.000003 (kWh)
89
+ [PROC-0][2024-10-23 19:50:03,690][energy][INFO] - + total: 0.001558 (kWh)
90
+ [PROC-0][2024-10-23 19:50:03,690][energy][INFO] - + prefill_iteration_4 energy consumption:
91
+ [PROC-0][2024-10-23 19:50:03,690][energy][INFO] - + CPU: 0.000315 (kWh)
92
+ [PROC-0][2024-10-23 19:50:03,690][energy][INFO] - + GPU: 0.001248 (kWh)
93
+ [PROC-0][2024-10-23 19:50:03,690][energy][INFO] - + RAM: 0.000003 (kWh)
94
+ [PROC-0][2024-10-23 19:50:03,690][energy][INFO] - + total: 0.001566 (kWh)
95
+ [PROC-0][2024-10-23 19:50:03,690][energy][INFO] - + prefill_iteration_5 energy consumption:
96
+ [PROC-0][2024-10-23 19:50:03,690][energy][INFO] - + CPU: 0.000314 (kWh)
97
+ [PROC-0][2024-10-23 19:50:03,690][energy][INFO] - + GPU: 0.001258 (kWh)
98
+ [PROC-0][2024-10-23 19:50:03,690][energy][INFO] - + RAM: 0.000003 (kWh)
99
+ [PROC-0][2024-10-23 19:50:03,690][energy][INFO] - + total: 0.001576 (kWh)
100
+ [PROC-0][2024-10-23 19:50:03,691][energy][INFO] - + prefill_iteration_6 energy consumption:
101
+ [PROC-0][2024-10-23 19:50:03,691][energy][INFO] - + CPU: 0.000315 (kWh)
102
+ [PROC-0][2024-10-23 19:50:03,691][energy][INFO] - + GPU: 0.001257 (kWh)
103
+ [PROC-0][2024-10-23 19:50:03,691][energy][INFO] - + RAM: 0.000003 (kWh)
104
+ [PROC-0][2024-10-23 19:50:03,691][energy][INFO] - + total: 0.001574 (kWh)
105
+ [PROC-0][2024-10-23 19:50:03,691][energy][INFO] - + prefill_iteration_7 energy consumption:
106
+ [PROC-0][2024-10-23 19:50:03,691][energy][INFO] - + CPU: 0.000000 (kWh)
107
+ [PROC-0][2024-10-23 19:50:03,691][energy][INFO] - + GPU: 0.000000 (kWh)
108
+ [PROC-0][2024-10-23 19:50:03,691][energy][INFO] - + RAM: 0.000000 (kWh)
109
+ [PROC-0][2024-10-23 19:50:03,691][energy][INFO] - + total: 0.000000 (kWh)
110
+ [PROC-0][2024-10-23 19:50:03,691][energy][INFO] - + prefill_iteration_8 energy consumption:
111
+ [PROC-0][2024-10-23 19:50:03,691][energy][INFO] - + CPU: 0.000313 (kWh)
112
+ [PROC-0][2024-10-23 19:50:03,692][energy][INFO] - + GPU: 0.001258 (kWh)
113
+ [PROC-0][2024-10-23 19:50:03,692][energy][INFO] - + RAM: 0.000003 (kWh)
114
+ [PROC-0][2024-10-23 19:50:03,692][energy][INFO] - + total: 0.001574 (kWh)
115
+ [PROC-0][2024-10-23 19:50:03,692][energy][INFO] - + prefill_iteration_9 energy consumption:
116
+ [PROC-0][2024-10-23 19:50:03,692][energy][INFO] - + CPU: 0.000309 (kWh)
117
+ [PROC-0][2024-10-23 19:50:03,692][energy][INFO] - + GPU: 0.001247 (kWh)
118
+ [PROC-0][2024-10-23 19:50:03,692][energy][INFO] - + RAM: 0.000003 (kWh)
119
+ [PROC-0][2024-10-23 19:50:03,692][energy][INFO] - + total: 0.001559 (kWh)
120
+ [PROC-0][2024-10-23 19:50:03,692][energy][INFO] - + prefill_iteration_10 energy consumption:
121
+ [PROC-0][2024-10-23 19:50:03,692][energy][INFO] - + CPU: 0.000313 (kWh)
122
+ [PROC-0][2024-10-23 19:50:03,692][energy][INFO] - + GPU: 0.001233 (kWh)
123
+ [PROC-0][2024-10-23 19:50:03,692][energy][INFO] - + RAM: 0.000003 (kWh)
124
+ [PROC-0][2024-10-23 19:50:03,693][energy][INFO] - + total: 0.001549 (kWh)
125
+ [PROC-0][2024-10-23 19:50:03,693][energy][INFO] - + decode energy consumption:
126
+ [PROC-0][2024-10-23 19:50:03,693][energy][INFO] - + CPU: 0.002021 (kWh)
127
+ [PROC-0][2024-10-23 19:50:03,693][energy][INFO] - + GPU: 0.004045 (kWh)
128
+ [PROC-0][2024-10-23 19:50:03,693][energy][INFO] - + RAM: 0.000021 (kWh)
129
+ [PROC-0][2024-10-23 19:50:03,693][energy][INFO] - + total: 0.006086 (kWh)
130
+ [PROC-0][2024-10-23 19:50:03,693][energy][INFO] - + decode_iteration_1 energy consumption:
131
+ [PROC-0][2024-10-23 19:50:03,693][energy][INFO] - + CPU: 0.002229 (kWh)
132
+ [PROC-0][2024-10-23 19:50:03,693][energy][INFO] - + GPU: 0.004436 (kWh)
133
+ [PROC-0][2024-10-23 19:50:03,693][energy][INFO] - + RAM: 0.000023 (kWh)
134
+ [PROC-0][2024-10-23 19:50:03,693][energy][INFO] - + total: 0.006687 (kWh)
135
+ [PROC-0][2024-10-23 19:50:03,693][energy][INFO] - + decode_iteration_2 energy consumption:
136
+ [PROC-0][2024-10-23 19:50:03,694][energy][INFO] - + CPU: 0.002264 (kWh)
137
+ [PROC-0][2024-10-23 19:50:03,694][energy][INFO] - + GPU: 0.004506 (kWh)
138
+ [PROC-0][2024-10-23 19:50:03,694][energy][INFO] - + RAM: 0.000023 (kWh)
139
+ [PROC-0][2024-10-23 19:50:03,694][energy][INFO] - + total: 0.006794 (kWh)
140
+ [PROC-0][2024-10-23 19:50:03,694][energy][INFO] - + decode_iteration_3 energy consumption:
141
+ [PROC-0][2024-10-23 19:50:03,694][energy][INFO] - + CPU: 0.002241 (kWh)
142
+ [PROC-0][2024-10-23 19:50:03,694][energy][INFO] - + GPU: 0.004481 (kWh)
143
+ [PROC-0][2024-10-23 19:50:03,694][energy][INFO] - + RAM: 0.000023 (kWh)
144
+ [PROC-0][2024-10-23 19:50:03,694][energy][INFO] - + total: 0.006744 (kWh)
145
+ [PROC-0][2024-10-23 19:50:03,694][energy][INFO] - + decode_iteration_4 energy consumption:
146
+ [PROC-0][2024-10-23 19:50:03,694][energy][INFO] - + CPU: 0.002231 (kWh)
147
+ [PROC-0][2024-10-23 19:50:03,694][energy][INFO] - + GPU: 0.004430 (kWh)
148
+ [PROC-0][2024-10-23 19:50:03,695][energy][INFO] - + RAM: 0.000023 (kWh)
149
+ [PROC-0][2024-10-23 19:50:03,695][energy][INFO] - + total: 0.006684 (kWh)
150
+ [PROC-0][2024-10-23 19:50:03,695][energy][INFO] - + decode_iteration_5 energy consumption:
151
+ [PROC-0][2024-10-23 19:50:03,695][energy][INFO] - + CPU: -0.000314 (kWh)
152
+ [PROC-0][2024-10-23 19:50:03,695][energy][INFO] - + GPU: -0.001258 (kWh)
153
+ [PROC-0][2024-10-23 19:50:03,695][energy][INFO] - + RAM: -0.000003 (kWh)
154
+ [PROC-0][2024-10-23 19:50:03,695][energy][INFO] - + total: -0.001576 (kWh)
155
+ [PROC-0][2024-10-23 19:50:03,695][energy][INFO] - + decode_iteration_6 energy consumption:
156
+ [PROC-0][2024-10-23 19:50:03,695][energy][INFO] - + CPU: 0.002244 (kWh)
157
+ [PROC-0][2024-10-23 19:50:03,695][energy][INFO] - + GPU: 0.004528 (kWh)
158
+ [PROC-0][2024-10-23 19:50:03,695][energy][INFO] - + RAM: 0.000023 (kWh)
159
+ [PROC-0][2024-10-23 19:50:03,695][energy][INFO] - + total: 0.006795 (kWh)
160
+ [PROC-0][2024-10-23 19:50:03,696][energy][INFO] - + decode_iteration_7 energy consumption:
161
+ [PROC-0][2024-10-23 19:50:03,696][energy][INFO] - + CPU: 0.002550 (kWh)
162
+ [PROC-0][2024-10-23 19:50:03,696][energy][INFO] - + GPU: 0.005766 (kWh)
163
+ [PROC-0][2024-10-23 19:50:03,696][energy][INFO] - + RAM: 0.000026 (kWh)
164
+ [PROC-0][2024-10-23 19:50:03,696][energy][INFO] - + total: 0.008342 (kWh)
165
+ [PROC-0][2024-10-23 19:50:03,696][energy][INFO] - + decode_iteration_8 energy consumption:
166
+ [PROC-0][2024-10-23 19:50:03,696][energy][INFO] - + CPU: 0.002264 (kWh)
167
+ [PROC-0][2024-10-23 19:50:03,696][energy][INFO] - + GPU: 0.004557 (kWh)
168
+ [PROC-0][2024-10-23 19:50:03,696][energy][INFO] - + RAM: 0.000023 (kWh)
169
+ [PROC-0][2024-10-23 19:50:03,696][energy][INFO] - + total: 0.006844 (kWh)
170
+ [PROC-0][2024-10-23 19:50:03,696][energy][INFO] - + decode_iteration_9 energy consumption:
171
+ [PROC-0][2024-10-23 19:50:03,696][energy][INFO] - + CPU: 0.002247 (kWh)
172
+ [PROC-0][2024-10-23 19:50:03,697][energy][INFO] - + GPU: 0.004534 (kWh)
173
+ [PROC-0][2024-10-23 19:50:03,697][energy][INFO] - + RAM: 0.000023 (kWh)
174
+ [PROC-0][2024-10-23 19:50:03,697][energy][INFO] - + total: 0.006804 (kWh)
175
+ [PROC-0][2024-10-23 19:50:03,697][energy][INFO] - + decode_iteration_10 energy consumption:
176
+ [PROC-0][2024-10-23 19:50:03,697][energy][INFO] - + CPU: 0.002253 (kWh)
177
+ [PROC-0][2024-10-23 19:50:03,697][energy][INFO] - + GPU: 0.004470 (kWh)
178
+ [PROC-0][2024-10-23 19:50:03,697][energy][INFO] - + RAM: 0.000023 (kWh)
179
+ [PROC-0][2024-10-23 19:50:03,697][energy][INFO] - + total: 0.006746 (kWh)
180
+ [PROC-0][2024-10-23 19:50:03,697][energy][INFO] - + preprocess energy consumption:
181
+ [PROC-0][2024-10-23 19:50:03,697][energy][INFO] - + CPU: 0.000011 (kWh)
182
+ [PROC-0][2024-10-23 19:50:03,697][energy][INFO] - + GPU: 0.000018 (kWh)
183
+ [PROC-0][2024-10-23 19:50:03,697][energy][INFO] - + RAM: 0.000000 (kWh)
184
+ [PROC-0][2024-10-23 19:50:03,698][energy][INFO] - + total: 0.000028 (kWh)
185
+ [PROC-0][2024-10-23 19:50:03,698][energy][INFO] - + prefill energy efficiency: 218923397.136495 (tokens/kWh)
186
+ [PROC-0][2024-10-23 19:50:03,698][energy][INFO] - + decode energy efficiency: 1478710.120569 (tokens/kWh)
187
+ [PROC-0][2024-10-23 19:50:03,698][energy][INFO] - + preprocess energy efficiency: 35187752.053421 (samples/kWh)
188
+ [2024-10-23 19:50:04,393][datasets][INFO] - PyTorch version 2.4.0 available.
runs/text_generation/a10g-large/HuggingFaceTB/SmolLM-135M/2024-10-23-19-09-15/error.log ADDED
The diff for this file is too large to render. See raw diff
runs/text_generation/a10g-large/HuggingFaceTB/SmolLM-135M/2024-10-23-19-09-15/experiment_config.json ADDED
@@ -0,0 +1,110 @@
1
+ {
2
+ "experiment_name": "text_generation",
3
+ "backend": {
4
+ "name": "pytorch",
5
+ "version": "2.4.0",
6
+ "_target_": "optimum_benchmark.backends.pytorch.backend.PyTorchBackend",
7
+ "task": "text-generation",
8
+ "model": "HuggingFaceTB/SmolLM-135M",
9
+ "processor": "HuggingFaceTB/SmolLM-135M",
10
+ "library": "transformers",
11
+ "device": "cuda",
12
+ "device_ids": "0",
13
+ "seed": 42,
14
+ "inter_op_num_threads": null,
15
+ "intra_op_num_threads": null,
16
+ "hub_kwargs": {
17
+ "revision": "main",
18
+ "force_download": false,
19
+ "local_files_only": false,
20
+ "trust_remote_code": true
21
+ },
22
+ "no_weights": true,
23
+ "device_map": null,
24
+ "torch_dtype": null,
25
+ "amp_autocast": false,
26
+ "amp_dtype": null,
27
+ "eval_mode": true,
28
+ "to_bettertransformer": false,
29
+ "low_cpu_mem_usage": null,
30
+ "attn_implementation": null,
31
+ "cache_implementation": null,
32
+ "torch_compile": false,
33
+ "torch_compile_config": {},
34
+ "quantization_scheme": null,
35
+ "quantization_config": {},
36
+ "deepspeed_inference": false,
37
+ "deepspeed_inference_config": {},
38
+ "peft_type": null,
39
+ "peft_config": {}
40
+ },
41
+ "launcher": {
42
+ "name": "process",
43
+ "_target_": "optimum_benchmark.launchers.process.launcher.ProcessLauncher",
44
+ "device_isolation": false,
45
+ "device_isolation_action": "warn",
46
+ "start_method": "spawn"
47
+ },
48
+ "benchmark": {
49
+ "name": "energy_star",
50
+ "_target_": "optimum_benchmark.benchmarks.energy_star.benchmark.EnergyStarBenchmark",
51
+ "dataset_name": "EnergyStarAI/text_generation",
52
+ "dataset_config": "",
53
+ "dataset_split": "train",
54
+ "num_samples": 1000,
55
+ "input_shapes": {
56
+ "batch_size": 1
57
+ },
58
+ "text_column_name": "text",
59
+ "truncation": true,
60
+ "max_length": -1,
61
+ "dataset_prefix1": "",
62
+ "dataset_prefix2": "",
63
+ "t5_task": "",
64
+ "image_column_name": "image",
65
+ "resize": false,
66
+ "question_column_name": "question",
67
+ "context_column_name": "context",
68
+ "sentence1_column_name": "sentence1",
69
+ "sentence2_column_name": "sentence2",
70
+ "audio_column_name": "audio",
71
+ "iterations": 10,
72
+ "warmup_runs": 10,
73
+ "energy": true,
74
+ "forward_kwargs": {},
75
+ "generate_kwargs": {
76
+ "max_new_tokens": 10,
77
+ "min_new_tokens": 10
78
+ },
79
+ "call_kwargs": {}
80
+ },
81
+ "environment": {
82
+ "cpu": " AMD EPYC 7R32",
83
+ "cpu_count": 48,
84
+ "cpu_ram_mb": 200472.73984,
85
+ "system": "Linux",
86
+ "machine": "x86_64",
87
+ "platform": "Linux-5.10.192-183.736.amzn2.x86_64-x86_64-with-glibc2.35",
88
+ "processor": "x86_64",
89
+ "python_version": "3.9.20",
90
+ "gpu": [
91
+ "NVIDIA A10G"
92
+ ],
93
+ "gpu_count": 1,
94
+ "gpu_vram_mb": 24146608128,
95
+ "optimum_benchmark_version": "0.2.0",
96
+ "optimum_benchmark_commit": null,
97
+ "transformers_version": "4.44.0",
98
+ "transformers_commit": null,
99
+ "accelerate_version": "0.33.0",
100
+ "accelerate_commit": null,
101
+ "diffusers_version": "0.30.0",
102
+ "diffusers_commit": null,
103
+ "optimum_version": null,
104
+ "optimum_commit": null,
105
+ "timm_version": null,
106
+ "timm_commit": null,
107
+ "peft_version": null,
108
+ "peft_commit": null
109
+ }
110
+ }
runs/text_generation/a10g-large/HuggingFaceTB/SmolLM-135M/2024-10-23-19-09-15/generate_codecarbon.json ADDED
@@ -0,0 +1,33 @@
1
+ {
2
+ "timestamp": "2024-10-23T19:50:03",
3
+ "project_name": "codecarbon",
4
+ "run_id": "05f83304-edf6-4273-b97d-e5de644eb411",
5
+ "duration": -1728779136.923329,
6
+ "emissions": 0.003061982417973605,
7
+ "emissions_rate": 1.4087063947890143e-05,
8
+ "cpu_power": 42.5,
9
+ "gpu_power": 94.45086032614498,
10
+ "ram_power": 0.433685302734375,
11
+ "cpu_energy": 0.002566093344748919,
12
+ "gpu_energy": 0.005702756228869177,
13
+ "ram_energy": 2.6184975778222443e-05,
14
+ "energy_consumed": 0.008295034549396313,
15
+ "country_name": "United States",
16
+ "country_iso_code": "USA",
17
+ "region": "virginia",
18
+ "cloud_provider": "",
19
+ "cloud_region": "",
20
+ "os": "Linux-5.10.192-183.736.amzn2.x86_64-x86_64-with-glibc2.35",
21
+ "python_version": "3.9.20",
22
+ "codecarbon_version": "2.5.1",
23
+ "cpu_count": 48,
24
+ "cpu_model": "AMD EPYC 7R32",
25
+ "gpu_count": 1,
26
+ "gpu_model": "1 x NVIDIA A10G",
27
+ "longitude": -77.4903,
28
+ "latitude": 39.0469,
29
+ "ram_total_size": 186.7047882080078,
30
+ "tracking_mode": "process",
31
+ "on_cloud": "N",
32
+ "pue": 1.0
33
+ }
runs/text_generation/a10g-large/HuggingFaceTB/SmolLM-135M/2024-10-23-19-09-15/prefill_codecarbon.json ADDED
@@ -0,0 +1,33 @@
1
+ {
2
+ "timestamp": "2024-10-23T19:13:56",
3
+ "project_name": "codecarbon",
4
+ "run_id": "05f83304-edf6-4273-b97d-e5de644eb411",
5
+ "duration": -1728779327.7865484,
6
+ "emissions": 0.0005716562515463526,
7
+ "emissions_rate": 2.157349516334259e-05,
8
+ "cpu_power": 42.5,
9
+ "gpu_power": 167.43685773439154,
10
+ "ram_power": 0.43362236022949224,
11
+ "cpu_energy": 0.00031288507095111233,
12
+ "gpu_energy": 0.001232562930495007,
13
+ "ram_energy": 3.1919675885623027e-06,
14
+ "energy_consumed": 0.0015486399690346833,
15
+ "country_name": "United States",
16
+ "country_iso_code": "USA",
17
+ "region": "virginia",
18
+ "cloud_provider": "",
19
+ "cloud_region": "",
20
+ "os": "Linux-5.10.192-183.736.amzn2.x86_64-x86_64-with-glibc2.35",
21
+ "python_version": "3.9.20",
22
+ "codecarbon_version": "2.5.1",
23
+ "cpu_count": 48,
24
+ "cpu_model": "AMD EPYC 7R32",
25
+ "gpu_count": 1,
26
+ "gpu_model": "1 x NVIDIA A10G",
27
+ "longitude": -77.4903,
28
+ "latitude": 39.0469,
29
+ "ram_total_size": 186.7047882080078,
30
+ "tracking_mode": "process",
31
+ "on_cloud": "N",
32
+ "pue": 1.0
33
+ }
runs/text_generation/a10g-large/HuggingFaceTB/SmolLM-135M/2024-10-23-19-09-15/preprocess_codecarbon.json ADDED
@@ -0,0 +1,33 @@
1
+ {
2
+ "timestamp": "2024-10-23T19:09:29",
3
+ "project_name": "codecarbon",
4
+ "run_id": "05f83304-edf6-4273-b97d-e5de644eb411",
5
+ "duration": -1728779353.3788598,
6
+ "emissions": 1.0490422273361802e-05,
7
+ "emissions_rate": 1.158058972493827e-05,
8
+ "cpu_power": 42.5,
9
+ "gpu_power": 70.04019833296246,
10
+ "ram_power": 0.31796836853027344,
11
+ "cpu_energy": 1.0718486329682895e-05,
12
+ "gpu_energy": 1.7620569652265772e-05,
13
+ "ram_energy": 7.992339706452299e-08,
14
+ "energy_consumed": 2.841897937901319e-05,
15
+ "country_name": "United States",
16
+ "country_iso_code": "USA",
17
+ "region": "virginia",
18
+ "cloud_provider": "",
19
+ "cloud_region": "",
20
+ "os": "Linux-5.10.192-183.736.amzn2.x86_64-x86_64-with-glibc2.35",
21
+ "python_version": "3.9.20",
22
+ "codecarbon_version": "2.5.1",
23
+ "cpu_count": 48,
24
+ "cpu_model": "AMD EPYC 7R32",
25
+ "gpu_count": 1,
26
+ "gpu_model": "1 x NVIDIA A10G",
27
+ "longitude": -77.4903,
28
+ "latitude": 39.0469,
29
+ "ram_total_size": 186.7047882080078,
30
+ "tracking_mode": "process",
31
+ "on_cloud": "N",
32
+ "pue": 1.0
33
+ }
runs/text_generation/a10g-large/HuggingFaceTB/SmolLM-360M/2024-10-24-14-22-22/.hydra/config.yaml ADDED
@@ -0,0 +1,96 @@
1
+ backend:
2
+ name: pytorch
3
+ version: 2.4.0
4
+ _target_: optimum_benchmark.backends.pytorch.backend.PyTorchBackend
5
+ task: text-generation
6
+ model: HuggingFaceTB/SmolLM-360M
7
+ processor: HuggingFaceTB/SmolLM-360M
8
+ library: null
9
+ device: cuda
10
+ device_ids: '0'
11
+ seed: 42
12
+ inter_op_num_threads: null
13
+ intra_op_num_threads: null
14
+ hub_kwargs: {}
15
+ no_weights: true
16
+ device_map: null
17
+ torch_dtype: null
18
+ amp_autocast: false
19
+ amp_dtype: null
20
+ eval_mode: true
21
+ to_bettertransformer: false
22
+ low_cpu_mem_usage: null
23
+ attn_implementation: null
24
+ cache_implementation: null
25
+ torch_compile: false
26
+ torch_compile_config: {}
27
+ quantization_scheme: null
28
+ quantization_config: {}
29
+ deepspeed_inference: false
30
+ deepspeed_inference_config: {}
31
+ peft_type: null
32
+ peft_config: {}
33
+ launcher:
34
+ name: process
35
+ _target_: optimum_benchmark.launchers.process.launcher.ProcessLauncher
36
+ device_isolation: false
37
+ device_isolation_action: warn
38
+ start_method: spawn
39
+ benchmark:
40
+ name: energy_star
41
+ _target_: optimum_benchmark.benchmarks.energy_star.benchmark.EnergyStarBenchmark
42
+ dataset_name: EnergyStarAI/text_generation
43
+ dataset_config: ''
44
+ dataset_split: train
45
+ num_samples: 1000
46
+ input_shapes:
47
+ batch_size: 1
48
+ text_column_name: text
49
+ truncation: true
50
+ max_length: -1
51
+ dataset_prefix1: ''
52
+ dataset_prefix2: ''
53
+ t5_task: ''
54
+ image_column_name: image
55
+ resize: false
56
+ question_column_name: question
57
+ context_column_name: context
58
+ sentence1_column_name: sentence1
59
+ sentence2_column_name: sentence2
60
+ audio_column_name: audio
61
+ iterations: 10
62
+ warmup_runs: 10
63
+ energy: true
64
+ forward_kwargs: {}
65
+ generate_kwargs:
66
+ max_new_tokens: 10
67
+ min_new_tokens: 10
68
+ call_kwargs: {}
69
+ experiment_name: text_generation
70
+ environment:
71
+ cpu: ' AMD EPYC 7R32'
72
+ cpu_count: 48
73
+ cpu_ram_mb: 200472.73984
74
+ system: Linux
75
+ machine: x86_64
76
+ platform: Linux-5.10.192-183.736.amzn2.x86_64-x86_64-with-glibc2.35
77
+ processor: x86_64
78
+ python_version: 3.9.20
79
+ gpu:
80
+ - NVIDIA A10G
81
+ gpu_count: 1
82
+ gpu_vram_mb: 24146608128
83
+ optimum_benchmark_version: 0.2.0
84
+ optimum_benchmark_commit: null
85
+ transformers_version: 4.44.0
86
+ transformers_commit: null
87
+ accelerate_version: 0.33.0
88
+ accelerate_commit: null
89
+ diffusers_version: 0.30.0
90
+ diffusers_commit: null
91
+ optimum_version: null
92
+ optimum_commit: null
93
+ timm_version: null
94
+ timm_commit: null
95
+ peft_version: null
96
+ peft_commit: null
runs/text_generation/a10g-large/HuggingFaceTB/SmolLM-360M/2024-10-24-14-22-22/.hydra/hydra.yaml ADDED
@@ -0,0 +1,175 @@
1
+ hydra:
2
+ run:
3
+ dir: ./runs/text_generation/HuggingFaceTB/SmolLM-360M/2024-10-24-14-22-22
4
+ sweep:
5
+ dir: sweeps/${experiment_name}/${backend.model}/${now:%Y-%m-%d-%H-%M-%S}
6
+ subdir: ${hydra.job.num}
7
+ launcher:
8
+ _target_: hydra._internal.core_plugins.basic_launcher.BasicLauncher
9
+ sweeper:
10
+ _target_: hydra._internal.core_plugins.basic_sweeper.BasicSweeper
11
+ max_batch_size: null
12
+ params: null
13
+ help:
14
+ app_name: ${hydra.job.name}
15
+ header: '${hydra.help.app_name} is powered by Hydra.
16
+
17
+ '
18
+ footer: 'Powered by Hydra (https://hydra.cc)
19
+
20
+ Use --hydra-help to view Hydra specific help
21
+
22
+ '
23
+ template: '${hydra.help.header}
24
+
25
+ == Configuration groups ==
26
+
27
+ Compose your configuration from those groups (group=option)
28
+
29
+
30
+ $APP_CONFIG_GROUPS
31
+
32
+
33
+ == Config ==
34
+
35
+ Override anything in the config (foo.bar=value)
36
+
37
+
38
+ $CONFIG
39
+
40
+
41
+ ${hydra.help.footer}
42
+
43
+ '
44
+ hydra_help:
45
+ template: 'Hydra (${hydra.runtime.version})
46
+
47
+ See https://hydra.cc for more info.
48
+
49
+
50
+ == Flags ==
51
+
52
+ $FLAGS_HELP
53
+
54
+
55
+ == Configuration groups ==
56
+
57
+ Compose your configuration from those groups (For example, append hydra/job_logging=disabled
58
+ to command line)
59
+
60
+
61
+ $HYDRA_CONFIG_GROUPS
62
+
63
+
64
+ Use ''--cfg hydra'' to Show the Hydra config.
65
+
66
+ '
67
+ hydra_help: ???
68
+ hydra_logging:
69
+ version: 1
70
+ formatters:
71
+ colorlog:
72
+ (): colorlog.ColoredFormatter
73
+ format: '[%(cyan)s%(asctime)s%(reset)s][%(purple)sHYDRA%(reset)s] %(message)s'
74
+ handlers:
75
+ console:
76
+ class: logging.StreamHandler
77
+ formatter: colorlog
78
+ stream: ext://sys.stdout
79
+ root:
80
+ level: INFO
81
+ handlers:
82
+ - console
83
+ disable_existing_loggers: false
84
+ job_logging:
85
+ version: 1
86
+ formatters:
87
+ simple:
88
+ format: '[%(asctime)s][%(name)s][%(levelname)s] - %(message)s'
89
+ colorlog:
90
+ (): colorlog.ColoredFormatter
91
+ format: '[%(cyan)s%(asctime)s%(reset)s][%(blue)s%(name)s%(reset)s][%(log_color)s%(levelname)s%(reset)s]
92
+ - %(message)s'
93
+ log_colors:
94
+ DEBUG: purple
95
+ INFO: green
96
+ WARNING: yellow
97
+ ERROR: red
98
+ CRITICAL: red
99
+ handlers:
100
+ console:
101
+ class: logging.StreamHandler
102
+ formatter: colorlog
103
+ stream: ext://sys.stdout
104
+ file:
105
+ class: logging.FileHandler
106
+ formatter: simple
107
+ filename: ${hydra.job.name}.log
108
+ root:
109
+ level: INFO
110
+ handlers:
111
+ - console
112
+ - file
113
+ disable_existing_loggers: false
114
+ env: {}
115
+ mode: RUN
116
+ searchpath: []
117
+ callbacks: {}
118
+ output_subdir: .hydra
119
+ overrides:
120
+ hydra:
121
+ - hydra.run.dir=./runs/text_generation/HuggingFaceTB/SmolLM-360M/2024-10-24-14-22-22
122
+ - hydra.mode=RUN
123
+ task:
124
+ - backend.model=HuggingFaceTB/SmolLM-360M
125
+ - backend.processor=HuggingFaceTB/SmolLM-360M
126
+ job:
127
+ name: cli
128
+ chdir: true
129
+ override_dirname: backend.model=HuggingFaceTB/SmolLM-360M,backend.processor=HuggingFaceTB/SmolLM-360M
130
+ id: ???
131
+ num: ???
132
+ config_name: text_generation
133
+ env_set:
134
+ OVERRIDE_BENCHMARKS: '1'
135
+ env_copy: []
136
+ config:
137
+ override_dirname:
138
+ kv_sep: '='
139
+ item_sep: ','
140
+ exclude_keys: []
141
+ runtime:
142
+ version: 1.3.2
143
+ version_base: '1.3'
144
+ cwd: /
145
+ config_sources:
146
+ - path: hydra.conf
147
+ schema: pkg
148
+ provider: hydra
149
+ - path: optimum_benchmark
150
+ schema: pkg
151
+ provider: main
152
+ - path: hydra_plugins.hydra_colorlog.conf
153
+ schema: pkg
154
+ provider: hydra-colorlog
155
+ - path: /optimum-benchmark/examples/energy_star
156
+ schema: file
157
+ provider: command-line
158
+ - path: ''
159
+ schema: structured
160
+ provider: schema
161
+ output_dir: /runs/text_generation/HuggingFaceTB/SmolLM-360M/2024-10-24-14-22-22
162
+ choices:
163
+ benchmark: energy_star
164
+ launcher: process
165
+ backend: pytorch
166
+ hydra/env: default
167
+ hydra/callbacks: null
168
+ hydra/job_logging: colorlog
169
+ hydra/hydra_logging: colorlog
170
+ hydra/hydra_help: default
171
+ hydra/help: default
172
+ hydra/sweeper: basic
173
+ hydra/launcher: basic
174
+ hydra/output: default
175
+ verbose: false
runs/text_generation/a10g-large/HuggingFaceTB/SmolLM-360M/2024-10-24-14-22-22/.hydra/overrides.yaml ADDED
@@ -0,0 +1,2 @@
1
+ - backend.model=HuggingFaceTB/SmolLM-360M
2
+ - backend.processor=HuggingFaceTB/SmolLM-360M
runs/text_generation/a10g-large/HuggingFaceTB/SmolLM-360M/2024-10-24-14-22-22/benchmark_report.json ADDED
@@ -0,0 +1,203 @@
1
+ {
2
+ "prefill": {
3
+ "memory": null,
4
+ "latency": null,
5
+ "throughput": null,
6
+ "energy": {
7
+ "unit": "kWh",
8
+ "cpu": 0.00038417323045180534,
9
+ "ram": 3.917567631278797e-06,
10
+ "gpu": 0.0020836598613710013,
11
+ "total": 0.002471750659454085
12
+ },
13
+ "efficiency": {
14
+ "unit": "tokens/kWh",
15
+ "value": 124481811.63546507
16
+ },
17
+ "measures": [
18
+ {
19
+ "unit": "kWh",
20
+ "cpu": 0.00042439884210412454,
21
+ "ram": 4.32643824564241e-06,
22
+ "gpu": 0.0022828590485080014,
23
+ "total": 0.0027115843288577682
24
+ },
25
+ {
26
+ "unit": "kWh",
27
+ "cpu": 0.00042775152907360186,
28
+ "ram": 4.361830192752804e-06,
29
+ "gpu": 0.002320344634051952,
30
+ "total": 0.0027524579933183063
31
+ },
32
+ {
33
+ "unit": "kWh",
34
+ "cpu": 0.0004269135638458061,
35
+ "ram": 4.353528012110313e-06,
36
+ "gpu": 0.002306500456309979,
37
+ "total": 0.002737767548167895
38
+ },
39
+ {
40
+ "unit": "kWh",
41
+ "cpu": 0.00042629300760428234,
42
+ "ram": 4.347332333560652e-06,
43
+ "gpu": 0.002313714350970053,
44
+ "total": 0.002744354690907896
45
+ },
46
+ {
47
+ "unit": "kWh",
48
+ "cpu": 0.00042705259539434323,
49
+ "ram": 4.355086217043936e-06,
50
+ "gpu": 0.0023207949121900606,
51
+ "total": 0.0027522025938014477
52
+ },
53
+ {
54
+ "unit": "kWh",
55
+ "cpu": 0.0004277230125062691,
56
+ "ram": 4.361916595886913e-06,
57
+ "gpu": 0.0023226774136958506,
58
+ "total": 0.002754762342798006
59
+ },
60
+ {
61
+ "unit": "kWh",
62
+ "cpu": 0.0,
63
+ "ram": 0.0,
64
+ "gpu": 0.0,
65
+ "total": 0.0
66
+ },
67
+ {
68
+ "unit": "kWh",
69
+ "cpu": 0.00042727887452002596,
70
+ "ram": 4.3573934032328814e-06,
71
+ "gpu": 0.002324739359790051,
72
+ "total": 0.0027563756277133103
73
+ },
74
+ {
75
+ "unit": "kWh",
76
+ "cpu": 0.0004273450137403721,
77
+ "ram": 4.357849664637245e-06,
78
+ "gpu": 0.0023255226937499707,
79
+ "total": 0.0027572255571549804
80
+ },
81
+ {
82
+ "unit": "kWh",
83
+ "cpu": 0.00042697586572922833,
84
+ "ram": 4.35430164792082e-06,
85
+ "gpu": 0.002319445744444093,
86
+ "total": 0.0027507759118212405
87
+ }
88
+ ]
89
+ },
90
+ "decode": {
91
+ "memory": null,
92
+ "latency": null,
93
+ "throughput": null,
94
+ "energy": {
95
+ "unit": "kWh",
96
+ "cpu": 0.0021575579513180274,
97
+ "ram": 2.201039115281833e-05,
98
+ "gpu": 0.004663859981084995,
99
+ "total": 0.0068434283235558405
100
+ },
101
+ "efficiency": {
102
+ "unit": "tokens/kWh",
103
+ "value": 1315130.308155782
104
+ },
105
+ "measures": [
106
+ {
107
+ "unit": "kWh",
108
+ "cpu": 0.0024012055666631877,
109
+ "ram": 2.449660161589356e-05,
110
+ "gpu": 0.005293597012651863,
111
+ "total": 0.007719299180930941
112
+ },
113
+ {
114
+ "unit": "kWh",
115
+ "cpu": 0.002399304204071693,
116
+ "ram": 2.447631017533231e-05,
117
+ "gpu": 0.005170139413886221,
118
+ "total": 0.007593919928133252
119
+ },
120
+ {
121
+ "unit": "kWh",
122
+ "cpu": 0.00239920346893047,
123
+ "ram": 2.4475506960495002e-05,
124
+ "gpu": 0.0051808830335919875,
125
+ "total": 0.007604562009482951
126
+ },
127
+ {
128
+ "unit": "kWh",
129
+ "cpu": 0.002397024483553307,
130
+ "ram": 2.4453144005693137e-05,
131
+ "gpu": 0.005157916904107829,
132
+ "total": 0.0075793945316668335
133
+ },
134
+ {
135
+ "unit": "kWh",
136
+ "cpu": -0.00042705259539434323,
137
+ "ram": -4.355086217043936e-06,
138
+ "gpu": -0.0023207949121900606,
139
+ "total": -0.0027522025938014477
140
+ },
141
+ {
142
+ "unit": "kWh",
143
+ "cpu": 0.002387084423529167,
144
+ "ram": 2.435184806787585e-05,
145
+ "gpu": 0.005154224678932229,
146
+ "total": 0.007565660950529268
147
+ },
148
+ {
149
+ "unit": "kWh",
150
+ "cpu": 0.0028258748023854002,
151
+ "ram": 2.8826662935648893e-05,
152
+ "gpu": 0.0074967232195958244,
153
+ "total": 0.010351424684916871
154
+ },
155
+ {
156
+ "unit": "kWh",
157
+ "cpu": 0.0024026328822716113,
158
+ "ram": 2.4510452757425444e-05,
159
+ "gpu": 0.005180848033564112,
160
+ "total": 0.0076079913685931436
161
+ },
162
+ {
163
+ "unit": "kWh",
164
+ "cpu": 0.0024013627431852193,
165
+ "ram": 2.4497710395037818e-05,
166
+ "gpu": 0.005174165528217922,
167
+ "total": 0.007600025981798177
168
+ },
169
+ {
170
+ "unit": "kWh",
171
+ "cpu": 0.002388939533984567,
172
+ "ram": 2.4370760831825227e-05,
173
+ "gpu": 0.005150896898492019,
174
+ "total": 0.007564207193308414
175
+ }
176
+ ]
177
+ },
178
+ "per_token": {
179
+ "memory": null,
180
+ "latency": null,
181
+ "throughput": null,
182
+ "energy": null,
183
+ "efficiency": null,
184
+ "measures": null
185
+ },
186
+ "preprocess": {
187
+ "memory": null,
188
+ "latency": null,
189
+ "throughput": null,
190
+ "energy": {
191
+ "unit": "kWh",
192
+ "cpu": 1.0503983233461947e-05,
193
+ "ram": 7.834823109019483e-08,
194
+ "gpu": 1.6727513381886716e-05,
195
+ "total": 2.730984484643886e-05
196
+ },
197
+ "efficiency": {
198
+ "unit": "samples/kWh",
199
+ "value": 36616831.97480331
200
+ },
201
+ "measures": null
202
+ }
203
+ }
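
The blocks above give per-phase energy summaries plus the ten raw iteration measures they were averaged from: the decode summary of 0.006843 kWh is the plain arithmetic mean of the ten decode measures (including the anomalous negative fifth iteration), and the same holds for prefill. Below is a minimal sketch of re-deriving those summaries with the standard library; the report filename and the assumption that the phase blocks sit at the top level of the JSON are inferred from the run-directory layout and closing braces above, not stated explicitly in this diff.

```python
import json
import statistics

# Assumed path: the run directory appears below for the other SmolLM-360M files;
# "benchmark_report.json" as the report filename is an assumption.
REPORT = "runs/text_generation/a10g-large/HuggingFaceTB/SmolLM-360M/2024-10-24-14-22-22/benchmark_report.json"

with open(REPORT) as f:
    report = json.load(f)

for phase in ("prefill", "decode"):
    totals = [m["total"] for m in report[phase]["measures"]]
    # The summary "energy.total" above matches the plain mean of the ten measures.
    print(
        phase,
        "mean:", statistics.mean(totals), "kWh",
        "stdev:", statistics.pstdev(totals), "kWh",
        "reported:", report[phase]["energy"]["total"], "kWh",
    )
```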
runs/text_generation/a10g-large/HuggingFaceTB/SmolLM-360M/2024-10-24-14-22-22/cli.log ADDED
@@ -0,0 +1,188 @@
1
+ [2024-10-24 14:22:25,779][launcher][INFO] - Allocating process launcher
2
+ [2024-10-24 14:22:25,779][process][INFO] - + Setting multiprocessing start method to spawn.
3
+ [2024-10-24 14:22:25,789][process][INFO] - + Launched benchmark in isolated process 180.
4
+ [PROC-0][2024-10-24 14:22:28,309][datasets][INFO] - PyTorch version 2.4.0 available.
5
+ [PROC-0][2024-10-24 14:22:29,215][backend][INFO] - Allocating pytorch backend
6
+ [PROC-0][2024-10-24 14:22:29,216][backend][INFO] - + Setting random seed to 42
7
+ [PROC-0][2024-10-24 14:22:30,119][pytorch][INFO] - + Using AutoModel class AutoModelForCausalLM
8
+ [PROC-0][2024-10-24 14:22:30,120][pytorch][INFO] - + Creating backend temporary directory
9
+ [PROC-0][2024-10-24 14:22:30,120][pytorch][INFO] - + Loading model with random weights
10
+ [PROC-0][2024-10-24 14:22:30,120][pytorch][INFO] - + Creating no weights model
11
+ [PROC-0][2024-10-24 14:22:30,120][pytorch][INFO] - + Creating no weights model directory
12
+ [PROC-0][2024-10-24 14:22:30,120][pytorch][INFO] - + Creating no weights model state dict
13
+ [PROC-0][2024-10-24 14:22:30,141][pytorch][INFO] - + Saving no weights model safetensors
14
+ [PROC-0][2024-10-24 14:22:30,141][pytorch][INFO] - + Saving no weights model pretrained config
15
+ [PROC-0][2024-10-24 14:22:30,142][pytorch][INFO] - + Loading no weights AutoModel
16
+ [PROC-0][2024-10-24 14:22:30,142][pytorch][INFO] - + Loading model directly on device: cuda
17
+ [PROC-0][2024-10-24 14:22:30,414][pytorch][INFO] - + Turning on model's eval mode
18
+ [PROC-0][2024-10-24 14:22:30,421][benchmark][INFO] - Allocating energy_star benchmark
19
+ [PROC-0][2024-10-24 14:22:30,421][energy_star][INFO] - + Loading raw dataset
20
+ [PROC-0][2024-10-24 14:22:31,588][energy_star][INFO] - + Updating Text Generation kwargs with default values
21
+ [PROC-0][2024-10-24 14:22:31,588][energy_star][INFO] - + Initializing Text Generation report
22
+ [PROC-0][2024-10-24 14:22:31,588][energy][INFO] - + Tracking GPU energy on devices [0]
23
+ [PROC-0][2024-10-24 14:22:35,800][energy_star][INFO] - + Preprocessing dataset
24
+ [PROC-0][2024-10-24 14:22:36,690][energy][INFO] - + Saving codecarbon emission data to preprocess_codecarbon.json
25
+ [PROC-0][2024-10-24 14:22:36,691][energy_star][INFO] - + Preparing backend for Inference
26
+ [PROC-0][2024-10-24 14:22:36,691][energy_star][INFO] - + Initialising dataloader
27
+ [PROC-0][2024-10-24 14:22:36,691][energy_star][INFO] - + Warming up backend for Inference
28
+ [PROC-0][2024-10-24 14:22:37,946][energy_star][INFO] - + Additional warmup for Text Generation
29
+ [PROC-0][2024-10-24 14:22:38,177][energy_star][INFO] - + Running Text Generation energy tracking for 10 iterations
30
+ [PROC-0][2024-10-24 14:22:38,177][energy_star][INFO] - + Prefill iteration 1/10
31
+ [PROC-0][2024-10-24 14:23:14,127][energy][INFO] - + Saving codecarbon emission data to prefill_codecarbon.json
32
+ [PROC-0][2024-10-24 14:23:14,127][energy_star][INFO] - + Prefill iteration 2/10
33
+ [PROC-0][2024-10-24 14:23:50,361][energy][INFO] - + Saving codecarbon emission data to prefill_codecarbon.json
34
+ [PROC-0][2024-10-24 14:23:50,361][energy_star][INFO] - + Prefill iteration 3/10
35
+ [PROC-0][2024-10-24 14:24:26,524][energy][INFO] - + Saving codecarbon emission data to prefill_codecarbon.json
36
+ [PROC-0][2024-10-24 14:24:26,524][energy_star][INFO] - + Prefill iteration 4/10
37
+ [PROC-0][2024-10-24 14:25:02,634][energy][INFO] - + Saving codecarbon emission data to prefill_codecarbon.json
38
+ [PROC-0][2024-10-24 14:25:02,635][energy_star][INFO] - + Prefill iteration 5/10
39
+ [PROC-0][2024-10-24 14:25:38,809][energy][INFO] - + Saving codecarbon emission data to prefill_codecarbon.json
40
+ [PROC-0][2024-10-24 14:25:38,810][energy_star][INFO] - + Prefill iteration 6/10
41
+ [PROC-0][2024-10-24 14:26:15,041][energy][INFO] - + Saving codecarbon emission data to prefill_codecarbon.json
42
+ [PROC-0][2024-10-24 14:26:15,041][energy_star][INFO] - + Prefill iteration 7/10
43
+ [PROC-0][2024-10-24 14:26:51,222][energy][INFO] - + Saving codecarbon emission data to prefill_codecarbon.json
44
+ [PROC-0][2024-10-24 14:26:51,222][energy_star][INFO] - + Prefill iteration 8/10
45
+ [PROC-0][2024-10-24 14:27:27,416][energy][INFO] - + Saving codecarbon emission data to prefill_codecarbon.json
46
+ [PROC-0][2024-10-24 14:27:27,416][energy_star][INFO] - + Prefill iteration 9/10
47
+ [PROC-0][2024-10-24 14:28:03,615][energy][INFO] - + Saving codecarbon emission data to prefill_codecarbon.json
48
+ [PROC-0][2024-10-24 14:28:03,616][energy_star][INFO] - + Prefill iteration 10/10
49
+ [PROC-0][2024-10-24 14:28:39,784][energy][INFO] - + Saving codecarbon emission data to prefill_codecarbon.json
50
+ [PROC-0][2024-10-24 14:28:39,784][energy_star][INFO] - + Decoding iteration 1/10
51
+ [PROC-0][2024-10-24 14:32:39,130][energy][INFO] - + Saving codecarbon emission data to generate_codecarbon.json
52
+ [PROC-0][2024-10-24 14:32:39,130][energy_star][INFO] - + Decoding iteration 2/10
53
+ [PROC-0][2024-10-24 14:36:38,599][energy][INFO] - + Saving codecarbon emission data to generate_codecarbon.json
54
+ [PROC-0][2024-10-24 14:36:38,600][energy_star][INFO] - + Decoding iteration 3/10
55
+ [PROC-0][2024-10-24 14:40:37,989][energy][INFO] - + Saving codecarbon emission data to generate_codecarbon.json
56
+ [PROC-0][2024-10-24 14:40:37,989][energy_star][INFO] - + Decoding iteration 4/10
57
+ [PROC-0][2024-10-24 14:44:37,142][energy][INFO] - + Saving codecarbon emission data to generate_codecarbon.json
58
+ [PROC-0][2024-10-24 14:44:37,142][energy_star][INFO] - + Decoding iteration 5/10
59
+ [PROC-0][2024-10-24 14:48:36,023][energy][INFO] - + Saving codecarbon emission data to generate_codecarbon.json
60
+ [PROC-0][2024-10-24 14:48:36,024][energy_star][INFO] - + Decoding iteration 6/10
61
+ [PROC-0][2024-10-24 14:52:34,455][energy][INFO] - + Saving codecarbon emission data to generate_codecarbon.json
62
+ [PROC-0][2024-10-24 14:52:34,455][energy_star][INFO] - + Decoding iteration 7/10
63
+ [PROC-0][2024-10-24 14:56:33,824][energy][INFO] - + Saving codecarbon emission data to generate_codecarbon.json
64
+ [PROC-0][2024-10-24 14:56:33,825][energy_star][INFO] - + Decoding iteration 8/10
65
+ [PROC-0][2024-10-24 15:00:33,535][energy][INFO] - + Saving codecarbon emission data to generate_codecarbon.json
66
+ [PROC-0][2024-10-24 15:00:33,536][energy_star][INFO] - + Decoding iteration 9/10
67
+ [PROC-0][2024-10-24 15:04:33,145][energy][INFO] - + Saving codecarbon emission data to generate_codecarbon.json
68
+ [PROC-0][2024-10-24 15:04:33,145][energy_star][INFO] - + Decoding iteration 10/10
69
+ [PROC-0][2024-10-24 15:08:31,670][energy][INFO] - + Saving codecarbon emission data to generate_codecarbon.json
70
+ [PROC-0][2024-10-24 15:08:31,671][energy][INFO] - + prefill energy consumption:
71
+ [PROC-0][2024-10-24 15:08:31,671][energy][INFO] - + CPU: 0.000384 (kWh)
72
+ [PROC-0][2024-10-24 15:08:31,671][energy][INFO] - + GPU: 0.002084 (kWh)
73
+ [PROC-0][2024-10-24 15:08:31,671][energy][INFO] - + RAM: 0.000004 (kWh)
74
+ [PROC-0][2024-10-24 15:08:31,672][energy][INFO] - + total: 0.002472 (kWh)
75
+ [PROC-0][2024-10-24 15:08:31,672][energy][INFO] - + prefill_iteration_1 energy consumption:
76
+ [PROC-0][2024-10-24 15:08:31,672][energy][INFO] - + CPU: 0.000424 (kWh)
77
+ [PROC-0][2024-10-24 15:08:31,672][energy][INFO] - + GPU: 0.002283 (kWh)
78
+ [PROC-0][2024-10-24 15:08:31,672][energy][INFO] - + RAM: 0.000004 (kWh)
79
+ [PROC-0][2024-10-24 15:08:31,672][energy][INFO] - + total: 0.002712 (kWh)
80
+ [PROC-0][2024-10-24 15:08:31,672][energy][INFO] - + prefill_iteration_2 energy consumption:
81
+ [PROC-0][2024-10-24 15:08:31,672][energy][INFO] - + CPU: 0.000428 (kWh)
82
+ [PROC-0][2024-10-24 15:08:31,672][energy][INFO] - + GPU: 0.002320 (kWh)
83
+ [PROC-0][2024-10-24 15:08:31,672][energy][INFO] - + RAM: 0.000004 (kWh)
84
+ [PROC-0][2024-10-24 15:08:31,672][energy][INFO] - + total: 0.002752 (kWh)
85
+ [PROC-0][2024-10-24 15:08:31,673][energy][INFO] - + prefill_iteration_3 energy consumption:
86
+ [PROC-0][2024-10-24 15:08:31,673][energy][INFO] - + CPU: 0.000427 (kWh)
87
+ [PROC-0][2024-10-24 15:08:31,673][energy][INFO] - + GPU: 0.002307 (kWh)
88
+ [PROC-0][2024-10-24 15:08:31,673][energy][INFO] - + RAM: 0.000004 (kWh)
89
+ [PROC-0][2024-10-24 15:08:31,673][energy][INFO] - + total: 0.002738 (kWh)
90
+ [PROC-0][2024-10-24 15:08:31,673][energy][INFO] - + prefill_iteration_4 energy consumption:
91
+ [PROC-0][2024-10-24 15:08:31,673][energy][INFO] - + CPU: 0.000426 (kWh)
92
+ [PROC-0][2024-10-24 15:08:31,673][energy][INFO] - + GPU: 0.002314 (kWh)
93
+ [PROC-0][2024-10-24 15:08:31,673][energy][INFO] - + RAM: 0.000004 (kWh)
94
+ [PROC-0][2024-10-24 15:08:31,673][energy][INFO] - + total: 0.002744 (kWh)
95
+ [PROC-0][2024-10-24 15:08:31,673][energy][INFO] - + prefill_iteration_5 energy consumption:
96
+ [PROC-0][2024-10-24 15:08:31,673][energy][INFO] - + CPU: 0.000427 (kWh)
97
+ [PROC-0][2024-10-24 15:08:31,674][energy][INFO] - + GPU: 0.002321 (kWh)
98
+ [PROC-0][2024-10-24 15:08:31,674][energy][INFO] - + RAM: 0.000004 (kWh)
99
+ [PROC-0][2024-10-24 15:08:31,674][energy][INFO] - + total: 0.002752 (kWh)
100
+ [PROC-0][2024-10-24 15:08:31,674][energy][INFO] - + prefill_iteration_6 energy consumption:
101
+ [PROC-0][2024-10-24 15:08:31,674][energy][INFO] - + CPU: 0.000428 (kWh)
102
+ [PROC-0][2024-10-24 15:08:31,674][energy][INFO] - + GPU: 0.002323 (kWh)
103
+ [PROC-0][2024-10-24 15:08:31,674][energy][INFO] - + RAM: 0.000004 (kWh)
104
+ [PROC-0][2024-10-24 15:08:31,674][energy][INFO] - + total: 0.002755 (kWh)
105
+ [PROC-0][2024-10-24 15:08:31,674][energy][INFO] - + prefill_iteration_7 energy consumption:
106
+ [PROC-0][2024-10-24 15:08:31,674][energy][INFO] - + CPU: 0.000000 (kWh)
107
+ [PROC-0][2024-10-24 15:08:31,674][energy][INFO] - + GPU: 0.000000 (kWh)
108
+ [PROC-0][2024-10-24 15:08:31,674][energy][INFO] - + RAM: 0.000000 (kWh)
109
+ [PROC-0][2024-10-24 15:08:31,674][energy][INFO] - + total: 0.000000 (kWh)
110
+ [PROC-0][2024-10-24 15:08:31,675][energy][INFO] - + prefill_iteration_8 energy consumption:
111
+ [PROC-0][2024-10-24 15:08:31,675][energy][INFO] - + CPU: 0.000427 (kWh)
112
+ [PROC-0][2024-10-24 15:08:31,675][energy][INFO] - + GPU: 0.002325 (kWh)
113
+ [PROC-0][2024-10-24 15:08:31,675][energy][INFO] - + RAM: 0.000004 (kWh)
114
+ [PROC-0][2024-10-24 15:08:31,675][energy][INFO] - + total: 0.002756 (kWh)
115
+ [PROC-0][2024-10-24 15:08:31,675][energy][INFO] - + prefill_iteration_9 energy consumption:
116
+ [PROC-0][2024-10-24 15:08:31,675][energy][INFO] - + CPU: 0.000427 (kWh)
117
+ [PROC-0][2024-10-24 15:08:31,675][energy][INFO] - + GPU: 0.002326 (kWh)
118
+ [PROC-0][2024-10-24 15:08:31,675][energy][INFO] - + RAM: 0.000004 (kWh)
119
+ [PROC-0][2024-10-24 15:08:31,675][energy][INFO] - + total: 0.002757 (kWh)
120
+ [PROC-0][2024-10-24 15:08:31,675][energy][INFO] - + prefill_iteration_10 energy consumption:
121
+ [PROC-0][2024-10-24 15:08:31,676][energy][INFO] - + CPU: 0.000427 (kWh)
122
+ [PROC-0][2024-10-24 15:08:31,676][energy][INFO] - + GPU: 0.002319 (kWh)
123
+ [PROC-0][2024-10-24 15:08:31,676][energy][INFO] - + RAM: 0.000004 (kWh)
124
+ [PROC-0][2024-10-24 15:08:31,676][energy][INFO] - + total: 0.002751 (kWh)
125
+ [PROC-0][2024-10-24 15:08:31,676][energy][INFO] - + decode energy consumption:
126
+ [PROC-0][2024-10-24 15:08:31,676][energy][INFO] - + CPU: 0.002158 (kWh)
127
+ [PROC-0][2024-10-24 15:08:31,676][energy][INFO] - + GPU: 0.004664 (kWh)
128
+ [PROC-0][2024-10-24 15:08:31,676][energy][INFO] - + RAM: 0.000022 (kWh)
129
+ [PROC-0][2024-10-24 15:08:31,676][energy][INFO] - + total: 0.006843 (kWh)
130
+ [PROC-0][2024-10-24 15:08:31,676][energy][INFO] - + decode_iteration_1 energy consumption:
131
+ [PROC-0][2024-10-24 15:08:31,676][energy][INFO] - + CPU: 0.002401 (kWh)
132
+ [PROC-0][2024-10-24 15:08:31,676][energy][INFO] - + GPU: 0.005294 (kWh)
133
+ [PROC-0][2024-10-24 15:08:31,676][energy][INFO] - + RAM: 0.000024 (kWh)
134
+ [PROC-0][2024-10-24 15:08:31,677][energy][INFO] - + total: 0.007719 (kWh)
135
+ [PROC-0][2024-10-24 15:08:31,677][energy][INFO] - + decode_iteration_2 energy consumption:
136
+ [PROC-0][2024-10-24 15:08:31,677][energy][INFO] - + CPU: 0.002399 (kWh)
137
+ [PROC-0][2024-10-24 15:08:31,677][energy][INFO] - + GPU: 0.005170 (kWh)
138
+ [PROC-0][2024-10-24 15:08:31,677][energy][INFO] - + RAM: 0.000024 (kWh)
139
+ [PROC-0][2024-10-24 15:08:31,677][energy][INFO] - + total: 0.007594 (kWh)
140
+ [PROC-0][2024-10-24 15:08:31,677][energy][INFO] - + decode_iteration_3 energy consumption:
141
+ [PROC-0][2024-10-24 15:08:31,677][energy][INFO] - + CPU: 0.002399 (kWh)
142
+ [PROC-0][2024-10-24 15:08:31,677][energy][INFO] - + GPU: 0.005181 (kWh)
143
+ [PROC-0][2024-10-24 15:08:31,677][energy][INFO] - + RAM: 0.000024 (kWh)
144
+ [PROC-0][2024-10-24 15:08:31,677][energy][INFO] - + total: 0.007605 (kWh)
145
+ [PROC-0][2024-10-24 15:08:31,677][energy][INFO] - + decode_iteration_4 energy consumption:
146
+ [PROC-0][2024-10-24 15:08:31,678][energy][INFO] - + CPU: 0.002397 (kWh)
147
+ [PROC-0][2024-10-24 15:08:31,678][energy][INFO] - + GPU: 0.005158 (kWh)
148
+ [PROC-0][2024-10-24 15:08:31,678][energy][INFO] - + RAM: 0.000024 (kWh)
149
+ [PROC-0][2024-10-24 15:08:31,678][energy][INFO] - + total: 0.007579 (kWh)
150
+ [PROC-0][2024-10-24 15:08:31,678][energy][INFO] - + decode_iteration_5 energy consumption:
151
+ [PROC-0][2024-10-24 15:08:31,678][energy][INFO] - + CPU: -0.000427 (kWh)
152
+ [PROC-0][2024-10-24 15:08:31,678][energy][INFO] - + GPU: -0.002321 (kWh)
153
+ [PROC-0][2024-10-24 15:08:31,678][energy][INFO] - + RAM: -0.000004 (kWh)
154
+ [PROC-0][2024-10-24 15:08:31,678][energy][INFO] - + total: -0.002752 (kWh)
155
+ [PROC-0][2024-10-24 15:08:31,678][energy][INFO] - + decode_iteration_6 energy consumption:
156
+ [PROC-0][2024-10-24 15:08:31,678][energy][INFO] - + CPU: 0.002387 (kWh)
157
+ [PROC-0][2024-10-24 15:08:31,678][energy][INFO] - + GPU: 0.005154 (kWh)
158
+ [PROC-0][2024-10-24 15:08:31,679][energy][INFO] - + RAM: 0.000024 (kWh)
159
+ [PROC-0][2024-10-24 15:08:31,679][energy][INFO] - + total: 0.007566 (kWh)
160
+ [PROC-0][2024-10-24 15:08:31,679][energy][INFO] - + decode_iteration_7 energy consumption:
161
+ [PROC-0][2024-10-24 15:08:31,679][energy][INFO] - + CPU: 0.002826 (kWh)
162
+ [PROC-0][2024-10-24 15:08:31,679][energy][INFO] - + GPU: 0.007497 (kWh)
163
+ [PROC-0][2024-10-24 15:08:31,679][energy][INFO] - + RAM: 0.000029 (kWh)
164
+ [PROC-0][2024-10-24 15:08:31,679][energy][INFO] - + total: 0.010351 (kWh)
165
+ [PROC-0][2024-10-24 15:08:31,679][energy][INFO] - + decode_iteration_8 energy consumption:
166
+ [PROC-0][2024-10-24 15:08:31,679][energy][INFO] - + CPU: 0.002403 (kWh)
167
+ [PROC-0][2024-10-24 15:08:31,679][energy][INFO] - + GPU: 0.005181 (kWh)
168
+ [PROC-0][2024-10-24 15:08:31,679][energy][INFO] - + RAM: 0.000025 (kWh)
169
+ [PROC-0][2024-10-24 15:08:31,679][energy][INFO] - + total: 0.007608 (kWh)
170
+ [PROC-0][2024-10-24 15:08:31,680][energy][INFO] - + decode_iteration_9 energy consumption:
171
+ [PROC-0][2024-10-24 15:08:31,680][energy][INFO] - + CPU: 0.002401 (kWh)
172
+ [PROC-0][2024-10-24 15:08:31,680][energy][INFO] - + GPU: 0.005174 (kWh)
173
+ [PROC-0][2024-10-24 15:08:31,680][energy][INFO] - + RAM: 0.000024 (kWh)
174
+ [PROC-0][2024-10-24 15:08:31,680][energy][INFO] - + total: 0.007600 (kWh)
175
+ [PROC-0][2024-10-24 15:08:31,680][energy][INFO] - + decode_iteration_10 energy consumption:
176
+ [PROC-0][2024-10-24 15:08:31,680][energy][INFO] - + CPU: 0.002389 (kWh)
177
+ [PROC-0][2024-10-24 15:08:31,680][energy][INFO] - + GPU: 0.005151 (kWh)
178
+ [PROC-0][2024-10-24 15:08:31,680][energy][INFO] - + RAM: 0.000024 (kWh)
179
+ [PROC-0][2024-10-24 15:08:31,680][energy][INFO] - + total: 0.007564 (kWh)
180
+ [PROC-0][2024-10-24 15:08:31,680][energy][INFO] - + preprocess energy consumption:
181
+ [PROC-0][2024-10-24 15:08:31,680][energy][INFO] - + CPU: 0.000011 (kWh)
182
+ [PROC-0][2024-10-24 15:08:31,681][energy][INFO] - + GPU: 0.000017 (kWh)
183
+ [PROC-0][2024-10-24 15:08:31,681][energy][INFO] - + RAM: 0.000000 (kWh)
184
+ [PROC-0][2024-10-24 15:08:31,681][energy][INFO] - + total: 0.000027 (kWh)
185
+ [PROC-0][2024-10-24 15:08:31,681][energy][INFO] - + prefill energy efficiency: 124481811.635465 (tokens/kWh)
186
+ [PROC-0][2024-10-24 15:08:31,681][energy][INFO] - + decode energy efficiency: 1315130.308156 (tokens/kWh)
187
+ [PROC-0][2024-10-24 15:08:31,681][energy][INFO] - + preprocess energy efficiency: 36616831.974803 (samples/kWh)
188
+ [2024-10-24 15:08:32,387][datasets][INFO] - PyTorch version 2.4.0 available.
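
As a quick cross-check of the efficiency figures at the end of this log, multiplying each efficiency by its corresponding total energy recovers a round count: roughly 9,000 tokens for decode and exactly 1,000 samples for preprocess (the latter matching `num_samples: 1000` in the experiment config below). Reading 9,000 as 1,000 samples × 9 decoded tokens, i.e. the decode phase excluding the first generated token, is an inference from the numbers rather than anything the log states. A small sketch, with values copied from the report above:

```python
# Values copied from the benchmark report / log above; interpreting the
# products as token/sample counts is an assumption, not stated in the log.
decode_eff, decode_kwh = 1315130.308155782, 0.0068434283235558405          # tokens/kWh, kWh
preprocess_eff, preprocess_kwh = 36616831.97480331, 2.730984484643886e-05  # samples/kWh, kWh

print(round(decode_eff * decode_kwh))          # ~9000  (decoded tokens?)
print(round(preprocess_eff * preprocess_kwh))  # 1000   (samples; matches num_samples)
```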
runs/text_generation/a10g-large/HuggingFaceTB/SmolLM-360M/2024-10-24-14-22-22/error.log ADDED
The diff for this file is too large to render. See raw diff
 
runs/text_generation/a10g-large/HuggingFaceTB/SmolLM-360M/2024-10-24-14-22-22/experiment_config.json ADDED
@@ -0,0 +1,110 @@
1
+ {
2
+ "experiment_name": "text_generation",
3
+ "backend": {
4
+ "name": "pytorch",
5
+ "version": "2.4.0",
6
+ "_target_": "optimum_benchmark.backends.pytorch.backend.PyTorchBackend",
7
+ "task": "text-generation",
8
+ "model": "HuggingFaceTB/SmolLM-360M",
9
+ "processor": "HuggingFaceTB/SmolLM-360M",
10
+ "library": "transformers",
11
+ "device": "cuda",
12
+ "device_ids": "0",
13
+ "seed": 42,
14
+ "inter_op_num_threads": null,
15
+ "intra_op_num_threads": null,
16
+ "hub_kwargs": {
17
+ "revision": "main",
18
+ "force_download": false,
19
+ "local_files_only": false,
20
+ "trust_remote_code": true
21
+ },
22
+ "no_weights": true,
23
+ "device_map": null,
24
+ "torch_dtype": null,
25
+ "amp_autocast": false,
26
+ "amp_dtype": null,
27
+ "eval_mode": true,
28
+ "to_bettertransformer": false,
29
+ "low_cpu_mem_usage": null,
30
+ "attn_implementation": null,
31
+ "cache_implementation": null,
32
+ "torch_compile": false,
33
+ "torch_compile_config": {},
34
+ "quantization_scheme": null,
35
+ "quantization_config": {},
36
+ "deepspeed_inference": false,
37
+ "deepspeed_inference_config": {},
38
+ "peft_type": null,
39
+ "peft_config": {}
40
+ },
41
+ "launcher": {
42
+ "name": "process",
43
+ "_target_": "optimum_benchmark.launchers.process.launcher.ProcessLauncher",
44
+ "device_isolation": false,
45
+ "device_isolation_action": "warn",
46
+ "start_method": "spawn"
47
+ },
48
+ "benchmark": {
49
+ "name": "energy_star",
50
+ "_target_": "optimum_benchmark.benchmarks.energy_star.benchmark.EnergyStarBenchmark",
51
+ "dataset_name": "EnergyStarAI/text_generation",
52
+ "dataset_config": "",
53
+ "dataset_split": "train",
54
+ "num_samples": 1000,
55
+ "input_shapes": {
56
+ "batch_size": 1
57
+ },
58
+ "text_column_name": "text",
59
+ "truncation": true,
60
+ "max_length": -1,
61
+ "dataset_prefix1": "",
62
+ "dataset_prefix2": "",
63
+ "t5_task": "",
64
+ "image_column_name": "image",
65
+ "resize": false,
66
+ "question_column_name": "question",
67
+ "context_column_name": "context",
68
+ "sentence1_column_name": "sentence1",
69
+ "sentence2_column_name": "sentence2",
70
+ "audio_column_name": "audio",
71
+ "iterations": 10,
72
+ "warmup_runs": 10,
73
+ "energy": true,
74
+ "forward_kwargs": {},
75
+ "generate_kwargs": {
76
+ "max_new_tokens": 10,
77
+ "min_new_tokens": 10
78
+ },
79
+ "call_kwargs": {}
80
+ },
81
+ "environment": {
82
+ "cpu": " AMD EPYC 7R32",
83
+ "cpu_count": 48,
84
+ "cpu_ram_mb": 200472.73984,
85
+ "system": "Linux",
86
+ "machine": "x86_64",
87
+ "platform": "Linux-5.10.192-183.736.amzn2.x86_64-x86_64-with-glibc2.35",
88
+ "processor": "x86_64",
89
+ "python_version": "3.9.20",
90
+ "gpu": [
91
+ "NVIDIA A10G"
92
+ ],
93
+ "gpu_count": 1,
94
+ "gpu_vram_mb": 24146608128,
95
+ "optimum_benchmark_version": "0.2.0",
96
+ "optimum_benchmark_commit": null,
97
+ "transformers_version": "4.44.0",
98
+ "transformers_commit": null,
99
+ "accelerate_version": "0.33.0",
100
+ "accelerate_commit": null,
101
+ "diffusers_version": "0.30.0",
102
+ "diffusers_commit": null,
103
+ "optimum_version": null,
104
+ "optimum_commit": null,
105
+ "timm_version": null,
106
+ "timm_commit": null,
107
+ "peft_version": null,
108
+ "peft_commit": null
109
+ }
110
+ }
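
For reference, the generation workload this config describes can be pulled out programmatically; a minimal sketch using only the standard library (the path comes from the diff header above):

```python
import json

CONFIG = "runs/text_generation/a10g-large/HuggingFaceTB/SmolLM-360M/2024-10-24-14-22-22/experiment_config.json"

with open(CONFIG) as f:
    cfg = json.load(f)

bench = cfg["benchmark"]
# 1,000 samples at batch size 1, 10 tracked iterations after 10 warmup runs,
# and exactly 10 new tokens generated per sample (min_new_tokens == max_new_tokens).
print(bench["num_samples"], bench["input_shapes"]["batch_size"],
      bench["iterations"], bench["warmup_runs"], bench["generate_kwargs"])
print(cfg["backend"]["model"], cfg["environment"]["gpu"])
```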
runs/text_generation/a10g-large/HuggingFaceTB/SmolLM-360M/2024-10-24-14-22-22/generate_codecarbon.json ADDED
@@ -0,0 +1,33 @@
1
+ {
2
+ "timestamp": "2024-10-24T15:08:31",
3
+ "project_name": "codecarbon",
4
+ "run_id": "acdaf5b9-c8a0-438e-95f7-e1c02c020a0d",
5
+ "duration": -1729710332.8090625,
6
+ "emissions": 0.0038076148714655256,
7
+ "emissions_rate": 1.5963324039613928e-05,
8
+ "cpu_power": 42.5,
9
+ "gpu_power": 112.74933364951919,
10
+ "ram_power": 0.4335465431213379,
11
+ "cpu_energy": 0.0028159153997137955,
12
+ "gpu_energy": 0.007470342642936112,
13
+ "ram_energy": 2.8725062479746048e-05,
14
+ "energy_consumed": 0.010314983105129655,
15
+ "country_name": "United States",
16
+ "country_iso_code": "USA",
17
+ "region": "virginia",
18
+ "cloud_provider": "",
19
+ "cloud_region": "",
20
+ "os": "Linux-5.10.192-183.736.amzn2.x86_64-x86_64-with-glibc2.35",
21
+ "python_version": "3.9.20",
22
+ "codecarbon_version": "2.5.1",
23
+ "cpu_count": 48,
24
+ "cpu_model": "AMD EPYC 7R32",
25
+ "gpu_count": 1,
26
+ "gpu_model": "1 x NVIDIA A10G",
27
+ "longitude": -77.4903,
28
+ "latitude": 39.0469,
29
+ "ram_total_size": 186.7047882080078,
30
+ "tracking_mode": "process",
31
+ "on_cloud": "N",
32
+ "pue": 1.0
33
+ }
runs/text_generation/a10g-large/HuggingFaceTB/SmolLM-360M/2024-10-24-14-22-22/prefill_codecarbon.json ADDED
@@ -0,0 +1,33 @@
1
+ {
2
+ "timestamp": "2024-10-24T14:28:39",
3
+ "project_name": "codecarbon",
4
+ "run_id": "acdaf5b9-c8a0-438e-95f7-e1c02c020a0d",
5
+ "duration": -1729710535.1662798,
6
+ "emissions": 0.001015405954926966,
7
+ "emissions_rate": 2.8076700900487555e-05,
8
+ "cpu_power": 42.5,
9
+ "gpu_power": 230.88594610407003,
10
+ "ram_power": 0.43345069885253906,
11
+ "cpu_energy": 0.00042697586572922833,
12
+ "gpu_energy": 0.002319445744444093,
13
+ "ram_energy": 4.35430164792082e-06,
14
+ "energy_consumed": 0.0027507759118212405,
15
+ "country_name": "United States",
16
+ "country_iso_code": "USA",
17
+ "region": "virginia",
18
+ "cloud_provider": "",
19
+ "cloud_region": "",
20
+ "os": "Linux-5.10.192-183.736.amzn2.x86_64-x86_64-with-glibc2.35",
21
+ "python_version": "3.9.20",
22
+ "codecarbon_version": "2.5.1",
23
+ "cpu_count": 48,
24
+ "cpu_model": "AMD EPYC 7R32",
25
+ "gpu_count": 1,
26
+ "gpu_model": "1 x NVIDIA A10G",
27
+ "longitude": -77.4903,
28
+ "latitude": 39.0469,
29
+ "ram_total_size": 186.7047882080078,
30
+ "tracking_mode": "process",
31
+ "on_cloud": "N",
32
+ "pue": 1.0
33
+ }
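
The codecarbon files above are internally consistent: `cpu_energy + gpu_energy + ram_energy` reproduces `energy_consumed`, and `emissions / energy_consumed` comes out to roughly 0.369 kg CO2eq/kWh in both files, presumably the emission factor codecarbon applied for the Virginia region (the large negative `duration` values look like a tracker quirk and are left as reported). A hedged sketch of that check:

```python
import json

# Path taken from the diff header above; the same check applies to generate_codecarbon.json.
PREFILL = "runs/text_generation/a10g-large/HuggingFaceTB/SmolLM-360M/2024-10-24-14-22-22/prefill_codecarbon.json"

with open(PREFILL) as f:
    cc = json.load(f)

# The per-component energies (kWh) sum to the reported total.
assert abs(cc["cpu_energy"] + cc["gpu_energy"] + cc["ram_energy"] - cc["energy_consumed"]) < 1e-9

# Implied emission factor in kg CO2eq per kWh for the reported region.
print(cc["region"], round(cc["emissions"] / cc["energy_consumed"], 3))  # virginia 0.369
```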