ydshieh committed
Commit ca97cde
1 Parent(s): b1c34ff
run_image_captioning_flax_reduced.py CHANGED
@@ -1243,16 +1243,16 @@ def main():
             state, train_metric = p_train_step(state, batch)
             train_metrics.append(train_metric)
             train_time += time.time() - batch_start
-
             time_per_step = train_time / cur_step
-            _train_metric = unreplicate(train_metric)
-            desc = f"Epoch... ({epoch + 1}/{num_epochs} | Step: {cur_step} | Loss: {_train_metric['loss']} | Learning Rate: {_train_metric['learning_rate']} | Time per step: {time_per_step})"
-            epochs.desc = desc
-            epochs.write(desc)
 
             # log and save info
             if training_args.logging_steps > 0 and cur_step % training_args.logging_steps == 0:
 
+                _train_metric = unreplicate(train_metric)
+                desc = f"Epoch... ({epoch + 1}/{num_epochs} | Step: {cur_step} | Loss: {_train_metric['loss']} | Learning Rate: {_train_metric['learning_rate']} | Time per step: {time_per_step})"
+                epochs.desc = desc
+                epochs.write(desc)
+
                 logger.info(desc)
 
                 with open(os.path.join(training_args.output_dir, "log"), "a", encoding="UTF-8") as fp:
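For context, a minimal self-contained sketch of the pattern this hunk adopts, assuming a standard Flax pmap training loop: the replicated per-step metrics are only pulled back to the host with flax.jax_utils.unreplicate (and formatted into the progress string) when a logging step is reached, instead of on every step. The names fake_train_step, run, num_steps, and logging_steps below are illustrative stand-ins, not part of the original script.

# Minimal sketch (illustrative, not the original script): defer host-side work
# (unreplicate + string formatting) to logging steps only.
import time

import jax
import jax.numpy as jnp
from flax.jax_utils import replicate, unreplicate


@jax.pmap
def fake_train_step(metric):
    # Stand-in for the real p_train_step: just decays a per-device "loss".
    return {"loss": metric["loss"] * 0.99, "learning_rate": metric["learning_rate"]}


def run(num_steps=10, logging_steps=5):
    # Replicated pytree of metrics: each leaf gets a leading (num_devices,) axis.
    train_metric = replicate({"loss": jnp.float32(1.0), "learning_rate": jnp.float32(3e-4)})
    train_time = 0.0
    for cur_step in range(1, num_steps + 1):
        batch_start = time.time()
        train_metric = fake_train_step(train_metric)
        train_time += time.time() - batch_start
        time_per_step = train_time / cur_step

        # Only unreplicate (device -> host) and build the description on logging steps.
        if logging_steps > 0 and cur_step % logging_steps == 0:
            _train_metric = unreplicate(train_metric)
            desc = (
                f"Step: {cur_step} | Loss: {_train_metric['loss']} | "
                f"Learning Rate: {_train_metric['learning_rate']} | "
                f"Time per step: {time_per_step}"
            )
            print(desc)


if __name__ == "__main__":
    run()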