rdiehlmartinez committed
Commit bc74a1c · Parent: 70a6425

updating _generate_examples to take in checkpoint and grad step kwargs

Files changed (1):
  1. pythia-training-metrics.py (+17 -10)
pythia-training-metrics.py CHANGED
@@ -73,6 +73,9 @@ class PythiaTrainingMetrics(datasets.GeneratorBasedBuilder):
 
         model_size_to_fp = { model_size: [] for model_size in self.MODEL_SIZES }
 
+        kwargs_checkpoint_steps = []
+        kwargs_gradient_steps = []
+
         checkpoint_steps = [0, 1, 2, 4, 8, 16, 32, 64, 128, 256, 512, 1000, ]
         checkpoint_steps.extend([3000 + (i * 10000) for i in range(0, 15)])
 
@@ -89,14 +92,20 @@ class PythiaTrainingMetrics(datasets.GeneratorBasedBuilder):
 
                 if self.config.name == "activations":
                     model_size_to_fp[model_size].append(f"{directory_path}/checkpoint_activations.pickle")
+                    kwargs_checkpoint_steps.append(checkpoint_step)
                 elif self.config.name == "weights":
                     model_size_to_fp[model_size].append(f"{directory_path}/checkpoint_weights.pickle")
+                    kwargs_checkpoint_steps.append(checkpoint_step)
                 elif self.config.name == "gradients":
                     for gradient_step in get_gradient_step(checkpoint_step):
                         model_size_to_fp[model_size].append(f"{directory_path}/checkpoint_gradients_{gradient_step}.pickle")
+                        kwargs_checkpoint_steps.append(checkpoint_step)
+                        kwargs_gradient_steps.append(gradient_step)
                 elif self.config.name == "gradients_mini":
                     for gradient_step in get_gradient_step(checkpoint_step)[:2]:
                         model_size_to_fp[model_size].append(f"{directory_path}/checkpoint_gradients_mini_{gradient_step}.pickle")
+                        kwargs_checkpoint_steps.append(checkpoint_step)
+                        kwargs_gradient_steps.append(gradient_step)
                 else:
                     raise Exception("Invalid config name")
 
@@ -106,12 +115,14 @@ class PythiaTrainingMetrics(datasets.GeneratorBasedBuilder):
             datasets.SplitGenerator(
                 name=model_size_name,
                 gen_kwargs={
-                    "filepaths": downloaded_fps
+                    "filepaths": downloaded_fps,
+                    "checkpoint_steps": kwargs_checkpoint_steps,
+                    "gradient_steps": kwargs_gradient_steps,
                 }
             ) for model_size_name, downloaded_fps in downloaded_files.items()
         ]
 
-    def _generate_examples(self, filepaths):
+    def _generate_examples(self, filepaths, checkpoint_steps, gradient_steps):
 
         # the filepaths should be a list of filepaths
         if isinstance(filepaths, str):
@@ -119,19 +130,15 @@ class PythiaTrainingMetrics(datasets.GeneratorBasedBuilder):
 
         global_idx = 0 # the unique identifier for the example
 
-        for filepath in filepaths:
+        for idx, filepath in enumerate(filepaths):
            with open(filepath, 'rb') as f:
                data = pickle.load(f)
-
-           # extract checkpoint step from the filepath
-           checkpoint_step = int(filepath.split("/")[-2].split("_")[-1])
-
+
           if self.config.name in ["activations", "weights"]:
               for layer_name, layer_data in data.items():
-                   yield global_idx, {"checkpoint_step": checkpoint_step, "layer_name": layer_name, "data": layer_data}
+                   yield global_idx, {"checkpoint_step": checkpoint_steps[idx], "layer_name": layer_name, "data": layer_data}
                   global_idx += 1
           elif self.config.name in ["gradients", "gradients_mini"]:
-               gradient_step = int(filepath.split('/')[-1].split("_")[-1].split(".")[0])
               for layer_name, layer_data in data.items():
-                   yield global_idx, {"checkpoint_step": checkpoint_step, "layer_name": layer_name, "gradient_step": gradient_step, "data": layer_data}
+                   yield global_idx, {"checkpoint_step": checkpoint_steps[idx], "layer_name": layer_name, "gradient_step": gradient_steps[idx], "data": layer_data}
                   global_idx += 1
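
For orientation, here is a minimal sketch of how the new fields surface to a consumer of this dataset script. It assumes the script is loaded from a local checkout via datasets.load_dataset, that the "gradients" config is requested, and that the split name matches one of the model-size splits produced by _split_generators; the script path and the "70m" split name are illustrative assumptions, not taken from this commit.

    from datasets import load_dataset

    # Load the "gradients" config from a local copy of the dataset script.
    # The split name ("70m") is an assumed model-size split name.
    dataset = load_dataset("./pythia-training-metrics.py", "gradients", split="70m")

    # Each example now carries the checkpoint_step and gradient_step that were
    # passed through gen_kwargs, instead of values re-parsed from the filepath.
    for example in dataset.select(range(3)):
        print(example["checkpoint_step"], example["gradient_step"], example["layer_name"])

Passing the step metadata through gen_kwargs keeps _generate_examples free of filepath parsing; the trade-off is that checkpoint_steps and gradient_steps must stay index-aligned with filepaths.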