diff --git "a/baseline/log/log-train-2022-05-27-13-57-22-1" "b/baseline/log/log-train-2022-05-27-13-57-22-1" new file mode 100644--- /dev/null +++ "b/baseline/log/log-train-2022-05-27-13-57-22-1" @@ -0,0 +1,982 @@ +2022-05-27 13:57:22,682 INFO [train.py:887] (1/4) Training started +2022-05-27 13:57:22,683 INFO [train.py:897] (1/4) Device: cuda:1 +2022-05-27 13:57:22,685 INFO [train.py:906] (1/4) {'best_train_loss': inf, 'best_valid_loss': inf, 'best_train_epoch': -1, 'best_valid_epoch': -1, 'batch_idx_train': 0, 'log_interval': 50, 'reset_interval': 200, 'valid_interval': 1600, 'feature_dim': 80, 'subsampling_factor': 4, 'encoder_dim': 512, 'nhead': 8, 'dim_feedforward': 2048, 'num_encoder_layers': 12, 'decoder_dim': 512, 'joiner_dim': 512, 'model_warm_step': 3000, 'env_info': {'k2-version': '1.13', 'k2-build-type': 'Release', 'k2-with-cuda': True, 'k2-git-sha1': 'f4fefe4882bc0ae59af951da3f47335d5495ef71', 'k2-git-date': 'Thu Feb 10 15:16:02 2022', 'lhotse-version': '1.1.0', 'torch-version': '1.10.0+cu102', 'torch-cuda-available': True, 'torch-cuda-version': '10.2', 'python-version': '3.8', 'icefall-git-branch': 'stateless6', 'icefall-git-sha1': '50641cd-dirty', 'icefall-git-date': 'Fri May 27 13:49:39 2022', 'icefall-path': '/ceph-data2/ly/open_source/vq2_icefall', 'k2-path': '/ceph-jb/yaozengwei/workspace/rnnt/k2/k2/python/k2/__init__.py', 'lhotse-path': '/ceph-ly/open-source/hubert/lhotse/lhotse/__init__.py', 'hostname': 'de-74279-k2-train-9-0425111216-65f66bdf4-bkrql', 'IP address': '10.177.77.9'}, 'enable_distiallation': False, 'distillation_layer': 5, 'num_codebooks': 16, 'world_size': 4, 'master_port': 12359, 'tensorboard': True, 'num_epochs': 50, 'start_epoch': 1, 'start_batch': 0, 'exp_dir': PosixPath('pruned_transducer_stateless6/exp'), 'bpe_model': 'data/lang_bpe_500/bpe.model', 'initial_lr': 0.003, 'lr_batches': 5000, 'lr_epochs': 6, 'context_size': 2, 'prune_range': 5, 'lm_scale': 0.25, 'am_scale': 0.0, 'simple_loss_scale': 0.5, 'codebook_loss_scale': 0.1, 'seed': 42, 'print_diagnostics': False, 'save_every_n': 8000, 'keep_last_k': 20, 'average_period': 100, 'use_fp16': False, 'full_libri': False, 'manifest_dir': PosixPath('data/vq_fbank'), 'max_duration': 300, 'bucketing_sampler': True, 'num_buckets': 30, 'concatenate_cuts': False, 'duration_factor': 1.0, 'gap': 1.0, 'on_the_fly_feats': False, 'shuffle': True, 'drop_last': True, 'return_cuts': True, 'num_workers': 2, 'enable_spec_aug': True, 'spec_aug_time_warp_factor': -1, 'enable_musan': True, 'input_strategy': 'PrecomputedFeatures', 'blank_id': 0, 'vocab_size': 500} +2022-05-27 13:57:22,685 INFO [train.py:908] (1/4) About to create model +2022-05-27 13:57:23,172 INFO [train.py:912] (1/4) Number of model parameters: 78648040 +2022-05-27 13:57:28,425 INFO [train.py:927] (1/4) Using DDP +2022-05-27 13:57:29,321 INFO [asr_datamodule.py:408] (1/4) About to get train-clean-100 cuts +2022-05-27 13:57:39,127 INFO [asr_datamodule.py:225] (1/4) Enable MUSAN +2022-05-27 13:57:39,127 INFO [asr_datamodule.py:226] (1/4) About to get Musan cuts +2022-05-27 13:57:42,710 INFO [asr_datamodule.py:254] (1/4) Enable SpecAugment +2022-05-27 13:57:42,710 INFO [asr_datamodule.py:255] (1/4) Time warp factor: -1 +2022-05-27 13:57:42,710 INFO [asr_datamodule.py:267] (1/4) Num frame mask: 10 +2022-05-27 13:57:42,710 INFO [asr_datamodule.py:280] (1/4) About to create train dataset +2022-05-27 13:57:42,711 INFO [asr_datamodule.py:309] (1/4) Using BucketingSampler. 
+2022-05-27 13:57:43,048 INFO [asr_datamodule.py:325] (1/4) About to create train dataloader +2022-05-27 13:57:43,050 INFO [asr_datamodule.py:429] (1/4) About to get dev-clean cuts +2022-05-27 13:57:43,219 INFO [asr_datamodule.py:434] (1/4) About to get dev-other cuts +2022-05-27 13:57:43,359 INFO [asr_datamodule.py:356] (1/4) About to create dev dataset +2022-05-27 13:57:43,370 INFO [asr_datamodule.py:375] (1/4) About to create dev dataloader +2022-05-27 13:57:43,370 INFO [train.py:1054] (1/4) Sanity check -- see if any of the batches in epoch 1 would cause OOM. +2022-05-27 13:57:47,488 INFO [distributed.py:874] (1/4) Reducer buckets have been rebuilt in this iteration. +2022-05-27 13:58:01,537 INFO [train.py:823] (1/4) Epoch 1, batch 0, loss[loss=0.8781, simple_loss=1.756, pruned_loss=6.804, over 7285.00 frames.], tot_loss[loss=0.8781, simple_loss=1.756, pruned_loss=6.804, over 7285.00 frames.], batch size: 21, lr: 3.00e-03 +2022-05-27 13:58:40,753 INFO [train.py:823] (1/4) Epoch 1, batch 50, loss[loss=0.5528, simple_loss=1.106, pruned_loss=7.222, over 7171.00 frames.], tot_loss[loss=0.5736, simple_loss=1.147, pruned_loss=7.165, over 322696.48 frames.], batch size: 23, lr: 3.00e-03 +2022-05-27 13:59:20,056 INFO [train.py:823] (1/4) Epoch 1, batch 100, loss[loss=0.4473, simple_loss=0.8946, pruned_loss=6.992, over 7186.00 frames.], tot_loss[loss=0.5141, simple_loss=1.028, pruned_loss=7.072, over 564199.28 frames.], batch size: 20, lr: 3.00e-03 +2022-05-27 13:59:59,621 INFO [train.py:823] (1/4) Epoch 1, batch 150, loss[loss=0.4336, simple_loss=0.8671, pruned_loss=6.764, over 7334.00 frames.], tot_loss[loss=0.4788, simple_loss=0.9575, pruned_loss=6.975, over 754187.93 frames.], batch size: 23, lr: 3.00e-03 +2022-05-27 14:00:39,041 INFO [train.py:823] (1/4) Epoch 1, batch 200, loss[loss=0.4224, simple_loss=0.8448, pruned_loss=6.782, over 7280.00 frames.], tot_loss[loss=0.4577, simple_loss=0.9154, pruned_loss=6.902, over 903302.93 frames.], batch size: 19, lr: 3.00e-03 +2022-05-27 14:01:18,152 INFO [train.py:823] (1/4) Epoch 1, batch 250, loss[loss=0.3792, simple_loss=0.7583, pruned_loss=6.6, over 7286.00 frames.], tot_loss[loss=0.4418, simple_loss=0.8837, pruned_loss=6.828, over 1014857.97 frames.], batch size: 17, lr: 3.00e-03 +2022-05-27 14:01:57,499 INFO [train.py:823] (1/4) Epoch 1, batch 300, loss[loss=0.4259, simple_loss=0.8518, pruned_loss=6.688, over 7236.00 frames.], tot_loss[loss=0.4294, simple_loss=0.8588, pruned_loss=6.778, over 1106254.47 frames.], batch size: 24, lr: 3.00e-03 +2022-05-27 14:02:36,747 INFO [train.py:823] (1/4) Epoch 1, batch 350, loss[loss=0.4379, simple_loss=0.8757, pruned_loss=6.712, over 6570.00 frames.], tot_loss[loss=0.4191, simple_loss=0.8383, pruned_loss=6.743, over 1177194.33 frames.], batch size: 34, lr: 3.00e-03 +2022-05-27 14:03:16,134 INFO [train.py:823] (1/4) Epoch 1, batch 400, loss[loss=0.4061, simple_loss=0.8123, pruned_loss=6.72, over 4958.00 frames.], tot_loss[loss=0.4096, simple_loss=0.8192, pruned_loss=6.716, over 1228214.42 frames.], batch size: 46, lr: 3.00e-03 +2022-05-27 14:03:55,412 INFO [train.py:823] (1/4) Epoch 1, batch 450, loss[loss=0.3302, simple_loss=0.6604, pruned_loss=6.523, over 7192.00 frames.], tot_loss[loss=0.3961, simple_loss=0.7922, pruned_loss=6.699, over 1274243.69 frames.], batch size: 19, lr: 2.99e-03 +2022-05-27 14:04:34,521 INFO [train.py:823] (1/4) Epoch 1, batch 500, loss[loss=0.3243, simple_loss=0.6486, pruned_loss=6.567, over 7390.00 frames.], tot_loss[loss=0.3809, simple_loss=0.7617, pruned_loss=6.693, over 
1308662.69 frames.], batch size: 19, lr: 2.99e-03 +2022-05-27 14:05:13,707 INFO [train.py:823] (1/4) Epoch 1, batch 550, loss[loss=0.3209, simple_loss=0.6418, pruned_loss=6.812, over 7194.00 frames.], tot_loss[loss=0.3643, simple_loss=0.7285, pruned_loss=6.691, over 1330211.74 frames.], batch size: 25, lr: 2.99e-03 +2022-05-27 14:05:53,121 INFO [train.py:823] (1/4) Epoch 1, batch 600, loss[loss=0.2858, simple_loss=0.5716, pruned_loss=6.754, over 7289.00 frames.], tot_loss[loss=0.3467, simple_loss=0.6934, pruned_loss=6.683, over 1346921.03 frames.], batch size: 19, lr: 2.99e-03 +2022-05-27 14:06:31,943 INFO [train.py:823] (1/4) Epoch 1, batch 650, loss[loss=0.2511, simple_loss=0.5022, pruned_loss=6.651, over 7102.00 frames.], tot_loss[loss=0.3311, simple_loss=0.6622, pruned_loss=6.681, over 1360973.52 frames.], batch size: 19, lr: 2.99e-03 +2022-05-27 14:07:11,383 INFO [train.py:823] (1/4) Epoch 1, batch 700, loss[loss=0.2186, simple_loss=0.4373, pruned_loss=6.551, over 7168.00 frames.], tot_loss[loss=0.3151, simple_loss=0.6302, pruned_loss=6.679, over 1372814.37 frames.], batch size: 17, lr: 2.99e-03 +2022-05-27 14:07:50,398 INFO [train.py:823] (1/4) Epoch 1, batch 750, loss[loss=0.2533, simple_loss=0.5065, pruned_loss=6.656, over 6852.00 frames.], tot_loss[loss=0.3024, simple_loss=0.6048, pruned_loss=6.687, over 1385970.93 frames.], batch size: 15, lr: 2.98e-03 +2022-05-27 14:08:29,948 INFO [train.py:823] (1/4) Epoch 1, batch 800, loss[loss=0.2665, simple_loss=0.533, pruned_loss=6.791, over 7152.00 frames.], tot_loss[loss=0.2896, simple_loss=0.5792, pruned_loss=6.689, over 1391606.50 frames.], batch size: 23, lr: 2.98e-03 +2022-05-27 14:09:08,827 INFO [train.py:823] (1/4) Epoch 1, batch 850, loss[loss=0.2161, simple_loss=0.4323, pruned_loss=6.592, over 6996.00 frames.], tot_loss[loss=0.2799, simple_loss=0.5598, pruned_loss=6.697, over 1399711.89 frames.], batch size: 16, lr: 2.98e-03 +2022-05-27 14:09:47,676 INFO [train.py:823] (1/4) Epoch 1, batch 900, loss[loss=0.2197, simple_loss=0.4394, pruned_loss=6.689, over 7296.00 frames.], tot_loss[loss=0.271, simple_loss=0.542, pruned_loss=6.708, over 1402674.48 frames.], batch size: 17, lr: 2.98e-03 +2022-05-27 14:10:41,067 INFO [train.py:823] (1/4) Epoch 2, batch 0, loss[loss=0.2229, simple_loss=0.4458, pruned_loss=6.674, over 7099.00 frames.], tot_loss[loss=0.2229, simple_loss=0.4458, pruned_loss=6.674, over 7099.00 frames.], batch size: 19, lr: 2.95e-03 +2022-05-27 14:11:20,502 INFO [train.py:823] (1/4) Epoch 2, batch 50, loss[loss=0.2192, simple_loss=0.4384, pruned_loss=6.714, over 7378.00 frames.], tot_loss[loss=0.2296, simple_loss=0.4592, pruned_loss=6.722, over 323389.42 frames.], batch size: 21, lr: 2.95e-03 +2022-05-27 14:11:59,974 INFO [train.py:823] (1/4) Epoch 2, batch 100, loss[loss=0.2557, simple_loss=0.5113, pruned_loss=6.799, over 7017.00 frames.], tot_loss[loss=0.2266, simple_loss=0.4532, pruned_loss=6.728, over 565808.13 frames.], batch size: 26, lr: 2.95e-03 +2022-05-27 14:12:39,348 INFO [train.py:823] (1/4) Epoch 2, batch 150, loss[loss=0.2021, simple_loss=0.4041, pruned_loss=6.678, over 7302.00 frames.], tot_loss[loss=0.2253, simple_loss=0.4507, pruned_loss=6.736, over 758274.69 frames.], batch size: 17, lr: 2.94e-03 +2022-05-27 14:13:18,809 INFO [train.py:823] (1/4) Epoch 2, batch 200, loss[loss=0.1885, simple_loss=0.377, pruned_loss=6.742, over 7101.00 frames.], tot_loss[loss=0.2223, simple_loss=0.4445, pruned_loss=6.737, over 906003.51 frames.], batch size: 18, lr: 2.94e-03 +2022-05-27 14:13:57,999 INFO [train.py:823] 
(1/4) Epoch 2, batch 250, loss[loss=0.2061, simple_loss=0.4122, pruned_loss=6.631, over 7157.00 frames.], tot_loss[loss=0.2197, simple_loss=0.4395, pruned_loss=6.731, over 1016735.29 frames.], batch size: 17, lr: 2.94e-03 +2022-05-27 14:14:37,453 INFO [train.py:823] (1/4) Epoch 2, batch 300, loss[loss=0.2076, simple_loss=0.4151, pruned_loss=6.623, over 7014.00 frames.], tot_loss[loss=0.2183, simple_loss=0.4365, pruned_loss=6.739, over 1107352.31 frames.], batch size: 16, lr: 2.93e-03 +2022-05-27 14:15:20,498 INFO [train.py:823] (1/4) Epoch 2, batch 350, loss[loss=0.2134, simple_loss=0.4267, pruned_loss=6.758, over 7158.00 frames.], tot_loss[loss=0.2167, simple_loss=0.4335, pruned_loss=6.746, over 1174985.96 frames.], batch size: 23, lr: 2.93e-03 +2022-05-27 14:15:59,767 INFO [train.py:823] (1/4) Epoch 2, batch 400, loss[loss=0.1903, simple_loss=0.3805, pruned_loss=6.756, over 7099.00 frames.], tot_loss[loss=0.2148, simple_loss=0.4295, pruned_loss=6.744, over 1225844.95 frames.], batch size: 18, lr: 2.93e-03 +2022-05-27 14:16:38,889 INFO [train.py:823] (1/4) Epoch 2, batch 450, loss[loss=0.21, simple_loss=0.4199, pruned_loss=6.733, over 7294.00 frames.], tot_loss[loss=0.2129, simple_loss=0.4257, pruned_loss=6.75, over 1265480.19 frames.], batch size: 21, lr: 2.92e-03 +2022-05-27 14:17:18,182 INFO [train.py:823] (1/4) Epoch 2, batch 500, loss[loss=0.1966, simple_loss=0.3932, pruned_loss=6.702, over 6986.00 frames.], tot_loss[loss=0.211, simple_loss=0.422, pruned_loss=6.754, over 1301602.75 frames.], batch size: 29, lr: 2.92e-03 +2022-05-27 14:17:57,123 INFO [train.py:823] (1/4) Epoch 2, batch 550, loss[loss=0.2119, simple_loss=0.4238, pruned_loss=6.766, over 4913.00 frames.], tot_loss[loss=0.2087, simple_loss=0.4175, pruned_loss=6.751, over 1323931.28 frames.], batch size: 46, lr: 2.92e-03 +2022-05-27 14:18:36,755 INFO [train.py:823] (1/4) Epoch 2, batch 600, loss[loss=0.2303, simple_loss=0.4606, pruned_loss=6.855, over 7279.00 frames.], tot_loss[loss=0.207, simple_loss=0.414, pruned_loss=6.75, over 1340833.71 frames.], batch size: 21, lr: 2.91e-03 +2022-05-27 14:19:16,324 INFO [train.py:823] (1/4) Epoch 2, batch 650, loss[loss=0.2145, simple_loss=0.4291, pruned_loss=6.838, over 7297.00 frames.], tot_loss[loss=0.2064, simple_loss=0.4128, pruned_loss=6.76, over 1358665.66 frames.], batch size: 22, lr: 2.91e-03 +2022-05-27 14:19:56,885 INFO [train.py:823] (1/4) Epoch 2, batch 700, loss[loss=0.1893, simple_loss=0.3787, pruned_loss=6.722, over 7016.00 frames.], tot_loss[loss=0.2047, simple_loss=0.4095, pruned_loss=6.761, over 1373865.15 frames.], batch size: 17, lr: 2.90e-03 +2022-05-27 14:20:36,641 INFO [train.py:823] (1/4) Epoch 2, batch 750, loss[loss=0.2148, simple_loss=0.4295, pruned_loss=6.889, over 7112.00 frames.], tot_loss[loss=0.2032, simple_loss=0.4064, pruned_loss=6.765, over 1381256.23 frames.], batch size: 20, lr: 2.90e-03 +2022-05-27 14:21:16,497 INFO [train.py:823] (1/4) Epoch 2, batch 800, loss[loss=0.2025, simple_loss=0.4049, pruned_loss=6.857, over 5356.00 frames.], tot_loss[loss=0.202, simple_loss=0.4041, pruned_loss=6.766, over 1387930.39 frames.], batch size: 48, lr: 2.89e-03 +2022-05-27 14:21:57,195 INFO [train.py:823] (1/4) Epoch 2, batch 850, loss[loss=0.1974, simple_loss=0.3949, pruned_loss=6.742, over 7194.00 frames.], tot_loss[loss=0.2005, simple_loss=0.4009, pruned_loss=6.763, over 1391896.02 frames.], batch size: 20, lr: 2.89e-03 +2022-05-27 14:22:36,473 INFO [train.py:823] (1/4) Epoch 2, batch 900, loss[loss=0.1583, simple_loss=0.3166, pruned_loss=6.631, over 
7298.00 frames.], tot_loss[loss=0.1992, simple_loss=0.3983, pruned_loss=6.769, over 1395660.60 frames.], batch size: 18, lr: 2.89e-03 +2022-05-27 14:23:29,962 INFO [train.py:823] (1/4) Epoch 3, batch 0, loss[loss=0.1771, simple_loss=0.3543, pruned_loss=6.643, over 7300.00 frames.], tot_loss[loss=0.1771, simple_loss=0.3543, pruned_loss=6.643, over 7300.00 frames.], batch size: 17, lr: 2.83e-03 +2022-05-27 14:24:09,552 INFO [train.py:823] (1/4) Epoch 3, batch 50, loss[loss=0.1981, simple_loss=0.3962, pruned_loss=6.711, over 4837.00 frames.], tot_loss[loss=0.1875, simple_loss=0.3751, pruned_loss=6.752, over 319422.96 frames.], batch size: 46, lr: 2.82e-03 +2022-05-27 14:24:48,966 INFO [train.py:823] (1/4) Epoch 3, batch 100, loss[loss=0.215, simple_loss=0.4299, pruned_loss=6.815, over 6907.00 frames.], tot_loss[loss=0.1878, simple_loss=0.3756, pruned_loss=6.759, over 565648.31 frames.], batch size: 26, lr: 2.82e-03 +2022-05-27 14:25:28,608 INFO [train.py:823] (1/4) Epoch 3, batch 150, loss[loss=0.2109, simple_loss=0.4217, pruned_loss=6.801, over 7375.00 frames.], tot_loss[loss=0.1878, simple_loss=0.3757, pruned_loss=6.767, over 756699.81 frames.], batch size: 20, lr: 2.81e-03 +2022-05-27 14:26:08,174 INFO [train.py:823] (1/4) Epoch 3, batch 200, loss[loss=0.1811, simple_loss=0.3623, pruned_loss=6.695, over 7103.00 frames.], tot_loss[loss=0.1869, simple_loss=0.3739, pruned_loss=6.762, over 908234.17 frames.], batch size: 20, lr: 2.81e-03 +2022-05-27 14:26:47,670 INFO [train.py:823] (1/4) Epoch 3, batch 250, loss[loss=0.1906, simple_loss=0.3813, pruned_loss=6.93, over 6992.00 frames.], tot_loss[loss=0.1878, simple_loss=0.3755, pruned_loss=6.772, over 1026110.13 frames.], batch size: 26, lr: 2.80e-03 +2022-05-27 14:27:26,951 INFO [train.py:823] (1/4) Epoch 3, batch 300, loss[loss=0.1761, simple_loss=0.3522, pruned_loss=6.78, over 7387.00 frames.], tot_loss[loss=0.1883, simple_loss=0.3766, pruned_loss=6.785, over 1115753.93 frames.], batch size: 19, lr: 2.80e-03 +2022-05-27 14:28:06,692 INFO [train.py:823] (1/4) Epoch 3, batch 350, loss[loss=0.1993, simple_loss=0.3986, pruned_loss=6.828, over 7340.00 frames.], tot_loss[loss=0.1872, simple_loss=0.3745, pruned_loss=6.789, over 1186118.55 frames.], batch size: 23, lr: 2.79e-03 +2022-05-27 14:28:45,740 INFO [train.py:823] (1/4) Epoch 3, batch 400, loss[loss=0.1746, simple_loss=0.3492, pruned_loss=6.659, over 7296.00 frames.], tot_loss[loss=0.1876, simple_loss=0.3752, pruned_loss=6.788, over 1240273.39 frames.], batch size: 18, lr: 2.79e-03 +2022-05-27 14:29:24,484 INFO [train.py:823] (1/4) Epoch 3, batch 450, loss[loss=0.1811, simple_loss=0.3622, pruned_loss=6.818, over 7171.00 frames.], tot_loss[loss=0.1873, simple_loss=0.3746, pruned_loss=6.791, over 1274525.16 frames.], batch size: 18, lr: 2.78e-03 +2022-05-27 14:30:03,764 INFO [train.py:823] (1/4) Epoch 3, batch 500, loss[loss=0.1571, simple_loss=0.3142, pruned_loss=6.697, over 7306.00 frames.], tot_loss[loss=0.1865, simple_loss=0.373, pruned_loss=6.795, over 1306195.70 frames.], batch size: 18, lr: 2.77e-03 +2022-05-27 14:30:42,917 INFO [train.py:823] (1/4) Epoch 3, batch 550, loss[loss=0.1892, simple_loss=0.3785, pruned_loss=6.693, over 7184.00 frames.], tot_loss[loss=0.1863, simple_loss=0.3726, pruned_loss=6.796, over 1334718.00 frames.], batch size: 21, lr: 2.77e-03 +2022-05-27 14:31:21,883 INFO [train.py:823] (1/4) Epoch 3, batch 600, loss[loss=0.1719, simple_loss=0.3438, pruned_loss=6.713, over 7372.00 frames.], tot_loss[loss=0.1852, simple_loss=0.3703, pruned_loss=6.787, over 1346881.30 
frames.], batch size: 20, lr: 2.76e-03 +2022-05-27 14:32:01,056 INFO [train.py:823] (1/4) Epoch 3, batch 650, loss[loss=0.1943, simple_loss=0.3886, pruned_loss=6.774, over 4644.00 frames.], tot_loss[loss=0.1847, simple_loss=0.3695, pruned_loss=6.792, over 1363237.93 frames.], batch size: 46, lr: 2.76e-03 +2022-05-27 14:32:40,557 INFO [train.py:823] (1/4) Epoch 3, batch 700, loss[loss=0.1913, simple_loss=0.3825, pruned_loss=6.894, over 7300.00 frames.], tot_loss[loss=0.1839, simple_loss=0.3679, pruned_loss=6.789, over 1375769.14 frames.], batch size: 22, lr: 2.75e-03 +2022-05-27 14:33:19,791 INFO [train.py:823] (1/4) Epoch 3, batch 750, loss[loss=0.174, simple_loss=0.348, pruned_loss=6.684, over 7183.00 frames.], tot_loss[loss=0.1829, simple_loss=0.3658, pruned_loss=6.792, over 1384075.79 frames.], batch size: 19, lr: 2.75e-03 +2022-05-27 14:33:58,483 INFO [train.py:823] (1/4) Epoch 3, batch 800, loss[loss=0.1866, simple_loss=0.3732, pruned_loss=6.825, over 7411.00 frames.], tot_loss[loss=0.1832, simple_loss=0.3664, pruned_loss=6.793, over 1393907.42 frames.], batch size: 22, lr: 2.74e-03 +2022-05-27 14:34:38,085 INFO [train.py:823] (1/4) Epoch 3, batch 850, loss[loss=0.1818, simple_loss=0.3636, pruned_loss=6.758, over 7090.00 frames.], tot_loss[loss=0.1828, simple_loss=0.3656, pruned_loss=6.797, over 1396140.92 frames.], batch size: 19, lr: 2.74e-03 +2022-05-27 14:35:16,880 INFO [train.py:823] (1/4) Epoch 3, batch 900, loss[loss=0.1944, simple_loss=0.3889, pruned_loss=6.9, over 5196.00 frames.], tot_loss[loss=0.183, simple_loss=0.3661, pruned_loss=6.8, over 1392933.01 frames.], batch size: 47, lr: 2.73e-03 +2022-05-27 14:36:10,826 INFO [train.py:823] (1/4) Epoch 4, batch 0, loss[loss=0.1728, simple_loss=0.3457, pruned_loss=6.753, over 7092.00 frames.], tot_loss[loss=0.1728, simple_loss=0.3457, pruned_loss=6.753, over 7092.00 frames.], batch size: 19, lr: 2.64e-03 +2022-05-27 14:36:49,881 INFO [train.py:823] (1/4) Epoch 4, batch 50, loss[loss=0.1659, simple_loss=0.3319, pruned_loss=6.774, over 7027.00 frames.], tot_loss[loss=0.1732, simple_loss=0.3464, pruned_loss=6.787, over 319529.39 frames.], batch size: 17, lr: 2.64e-03 +2022-05-27 14:37:30,273 INFO [train.py:823] (1/4) Epoch 4, batch 100, loss[loss=0.181, simple_loss=0.3621, pruned_loss=6.857, over 7372.00 frames.], tot_loss[loss=0.1758, simple_loss=0.3515, pruned_loss=6.805, over 564665.90 frames.], batch size: 21, lr: 2.63e-03 +2022-05-27 14:38:10,738 INFO [train.py:823] (1/4) Epoch 4, batch 150, loss[loss=0.1774, simple_loss=0.3548, pruned_loss=6.756, over 7153.00 frames.], tot_loss[loss=0.1756, simple_loss=0.3513, pruned_loss=6.804, over 750479.63 frames.], batch size: 17, lr: 2.63e-03 +2022-05-27 14:38:51,369 INFO [train.py:823] (1/4) Epoch 4, batch 200, loss[loss=0.275, simple_loss=0.347, pruned_loss=1.015, over 7184.00 frames.], tot_loss[loss=0.2729, simple_loss=0.3647, pruned_loss=4.926, over 902771.36 frames.], batch size: 18, lr: 2.62e-03 +2022-05-27 14:39:31,816 INFO [train.py:823] (1/4) Epoch 4, batch 250, loss[loss=0.2518, simple_loss=0.3574, pruned_loss=0.7313, over 7387.00 frames.], tot_loss[loss=0.2704, simple_loss=0.3611, pruned_loss=3.664, over 1021436.50 frames.], batch size: 21, lr: 2.62e-03 +2022-05-27 14:40:11,099 INFO [train.py:823] (1/4) Epoch 4, batch 300, loss[loss=0.2413, simple_loss=0.3838, pruned_loss=0.4935, over 7203.00 frames.], tot_loss[loss=0.2613, simple_loss=0.3608, pruned_loss=2.795, over 1106555.74 frames.], batch size: 20, lr: 2.61e-03 +2022-05-27 14:40:49,899 INFO [train.py:823] (1/4) Epoch 4, 
batch 350, loss[loss=0.2366, simple_loss=0.3961, pruned_loss=0.3849, over 7142.00 frames.], tot_loss[loss=0.2498, simple_loss=0.3601, pruned_loss=2.157, over 1172353.05 frames.], batch size: 23, lr: 2.60e-03 +2022-05-27 14:41:29,695 INFO [train.py:823] (1/4) Epoch 4, batch 400, loss[loss=0.1976, simple_loss=0.3453, pruned_loss=0.2499, over 7243.00 frames.], tot_loss[loss=0.2377, simple_loss=0.3572, pruned_loss=1.676, over 1226221.68 frames.], batch size: 25, lr: 2.60e-03 +2022-05-27 14:42:08,254 INFO [train.py:823] (1/4) Epoch 4, batch 450, loss[loss=0.1766, simple_loss=0.3134, pruned_loss=0.1995, over 7162.00 frames.], tot_loss[loss=0.2282, simple_loss=0.3558, pruned_loss=1.32, over 1268363.43 frames.], batch size: 17, lr: 2.59e-03 +2022-05-27 14:42:47,477 INFO [train.py:823] (1/4) Epoch 4, batch 500, loss[loss=0.2021, simple_loss=0.3639, pruned_loss=0.202, over 7215.00 frames.], tot_loss[loss=0.2213, simple_loss=0.3558, pruned_loss=1.052, over 1305192.78 frames.], batch size: 25, lr: 2.59e-03 +2022-05-27 14:43:26,644 INFO [train.py:823] (1/4) Epoch 4, batch 550, loss[loss=0.1862, simple_loss=0.3373, pruned_loss=0.1756, over 7391.00 frames.], tot_loss[loss=0.2149, simple_loss=0.3542, pruned_loss=0.8496, over 1332012.35 frames.], batch size: 19, lr: 2.58e-03 +2022-05-27 14:44:06,023 INFO [train.py:823] (1/4) Epoch 4, batch 600, loss[loss=0.2019, simple_loss=0.3671, pruned_loss=0.1834, over 7199.00 frames.], tot_loss[loss=0.2099, simple_loss=0.3531, pruned_loss=0.6943, over 1353997.24 frames.], batch size: 21, lr: 2.57e-03 +2022-05-27 14:44:44,732 INFO [train.py:823] (1/4) Epoch 4, batch 650, loss[loss=0.1801, simple_loss=0.329, pruned_loss=0.1556, over 7369.00 frames.], tot_loss[loss=0.2048, simple_loss=0.3505, pruned_loss=0.5735, over 1370274.31 frames.], batch size: 20, lr: 2.57e-03 +2022-05-27 14:45:23,900 INFO [train.py:823] (1/4) Epoch 4, batch 700, loss[loss=0.1991, simple_loss=0.3602, pruned_loss=0.1901, over 5010.00 frames.], tot_loss[loss=0.2037, simple_loss=0.353, pruned_loss=0.4869, over 1376822.69 frames.], batch size: 47, lr: 2.56e-03 +2022-05-27 14:46:02,611 INFO [train.py:823] (1/4) Epoch 4, batch 750, loss[loss=0.1913, simple_loss=0.3505, pruned_loss=0.1602, over 7099.00 frames.], tot_loss[loss=0.2009, simple_loss=0.352, pruned_loss=0.4155, over 1384770.05 frames.], batch size: 19, lr: 2.56e-03 +2022-05-27 14:46:41,994 INFO [train.py:823] (1/4) Epoch 4, batch 800, loss[loss=0.158, simple_loss=0.293, pruned_loss=0.1147, over 7016.00 frames.], tot_loss[loss=0.1983, simple_loss=0.3507, pruned_loss=0.359, over 1387450.17 frames.], batch size: 17, lr: 2.55e-03 +2022-05-27 14:47:21,026 INFO [train.py:823] (1/4) Epoch 4, batch 850, loss[loss=0.1986, simple_loss=0.3625, pruned_loss=0.1738, over 7313.00 frames.], tot_loss[loss=0.196, simple_loss=0.3492, pruned_loss=0.3139, over 1392701.21 frames.], batch size: 22, lr: 2.54e-03 +2022-05-27 14:47:59,963 INFO [train.py:823] (1/4) Epoch 4, batch 900, loss[loss=0.1622, simple_loss=0.3023, pruned_loss=0.1109, over 7185.00 frames.], tot_loss[loss=0.1933, simple_loss=0.3466, pruned_loss=0.278, over 1389384.97 frames.], batch size: 18, lr: 2.54e-03 +2022-05-27 14:48:51,286 INFO [train.py:823] (1/4) Epoch 5, batch 0, loss[loss=0.2011, simple_loss=0.3673, pruned_loss=0.1745, over 7335.00 frames.], tot_loss[loss=0.2011, simple_loss=0.3673, pruned_loss=0.1745, over 7335.00 frames.], batch size: 23, lr: 2.44e-03 +2022-05-27 14:49:30,534 INFO [train.py:823] (1/4) Epoch 5, batch 50, loss[loss=0.1981, simple_loss=0.3639, pruned_loss=0.1617, over 
6961.00 frames.], tot_loss[loss=0.1826, simple_loss=0.336, pruned_loss=0.1455, over 325626.83 frames.], batch size: 26, lr: 2.44e-03 +2022-05-27 14:50:10,130 INFO [train.py:823] (1/4) Epoch 5, batch 100, loss[loss=0.1783, simple_loss=0.3292, pruned_loss=0.1374, over 7111.00 frames.], tot_loss[loss=0.1815, simple_loss=0.3347, pruned_loss=0.1418, over 569619.04 frames.], batch size: 20, lr: 2.43e-03 +2022-05-27 14:50:49,491 INFO [train.py:823] (1/4) Epoch 5, batch 150, loss[loss=0.173, simple_loss=0.3223, pruned_loss=0.1187, over 7382.00 frames.], tot_loss[loss=0.1812, simple_loss=0.3341, pruned_loss=0.1417, over 757758.11 frames.], batch size: 20, lr: 2.42e-03 +2022-05-27 14:51:28,466 INFO [train.py:823] (1/4) Epoch 5, batch 200, loss[loss=0.1916, simple_loss=0.3548, pruned_loss=0.1414, over 7160.00 frames.], tot_loss[loss=0.1822, simple_loss=0.3361, pruned_loss=0.1419, over 904625.75 frames.], batch size: 22, lr: 2.42e-03 +2022-05-27 14:52:07,832 INFO [train.py:823] (1/4) Epoch 5, batch 250, loss[loss=0.1922, simple_loss=0.3542, pruned_loss=0.1511, over 4967.00 frames.], tot_loss[loss=0.1828, simple_loss=0.3369, pruned_loss=0.1432, over 1015069.83 frames.], batch size: 48, lr: 2.41e-03 +2022-05-27 14:52:46,743 INFO [train.py:823] (1/4) Epoch 5, batch 300, loss[loss=0.1879, simple_loss=0.35, pruned_loss=0.1287, over 7161.00 frames.], tot_loss[loss=0.1832, simple_loss=0.3379, pruned_loss=0.1425, over 1105470.83 frames.], batch size: 23, lr: 2.41e-03 +2022-05-27 14:53:26,207 INFO [train.py:823] (1/4) Epoch 5, batch 350, loss[loss=0.1928, simple_loss=0.3575, pruned_loss=0.1406, over 7221.00 frames.], tot_loss[loss=0.183, simple_loss=0.3378, pruned_loss=0.1411, over 1176011.27 frames.], batch size: 24, lr: 2.40e-03 +2022-05-27 14:54:05,560 INFO [train.py:823] (1/4) Epoch 5, batch 400, loss[loss=0.1836, simple_loss=0.3392, pruned_loss=0.1401, over 7021.00 frames.], tot_loss[loss=0.1828, simple_loss=0.3376, pruned_loss=0.14, over 1235303.93 frames.], batch size: 17, lr: 2.39e-03 +2022-05-27 14:54:45,105 INFO [train.py:823] (1/4) Epoch 5, batch 450, loss[loss=0.1797, simple_loss=0.3332, pruned_loss=0.1304, over 7046.00 frames.], tot_loss[loss=0.1827, simple_loss=0.3375, pruned_loss=0.139, over 1270798.89 frames.], batch size: 26, lr: 2.39e-03 +2022-05-27 14:55:24,631 INFO [train.py:823] (1/4) Epoch 5, batch 500, loss[loss=0.1556, simple_loss=0.2931, pruned_loss=0.09063, over 7187.00 frames.], tot_loss[loss=0.1817, simple_loss=0.336, pruned_loss=0.137, over 1305775.70 frames.], batch size: 19, lr: 2.38e-03 +2022-05-27 14:56:03,718 INFO [train.py:823] (1/4) Epoch 5, batch 550, loss[loss=0.1719, simple_loss=0.3215, pruned_loss=0.1117, over 6936.00 frames.], tot_loss[loss=0.1811, simple_loss=0.3351, pruned_loss=0.135, over 1331825.69 frames.], batch size: 29, lr: 2.38e-03 +2022-05-27 14:56:42,868 INFO [train.py:823] (1/4) Epoch 5, batch 600, loss[loss=0.1804, simple_loss=0.3349, pruned_loss=0.1294, over 6555.00 frames.], tot_loss[loss=0.1808, simple_loss=0.3347, pruned_loss=0.1346, over 1350295.07 frames.], batch size: 34, lr: 2.37e-03 +2022-05-27 14:57:22,156 INFO [train.py:823] (1/4) Epoch 5, batch 650, loss[loss=0.1979, simple_loss=0.3678, pruned_loss=0.1397, over 7285.00 frames.], tot_loss[loss=0.1803, simple_loss=0.334, pruned_loss=0.133, over 1365083.46 frames.], batch size: 21, lr: 2.37e-03 +2022-05-27 14:58:00,828 INFO [train.py:823] (1/4) Epoch 5, batch 700, loss[loss=0.1993, simple_loss=0.3671, pruned_loss=0.1574, over 7065.00 frames.], tot_loss[loss=0.1802, simple_loss=0.3339, 
pruned_loss=0.1324, over 1374133.98 frames.], batch size: 26, lr: 2.36e-03 +2022-05-27 14:58:39,873 INFO [train.py:823] (1/4) Epoch 5, batch 750, loss[loss=0.2053, simple_loss=0.3769, pruned_loss=0.1686, over 7157.00 frames.], tot_loss[loss=0.1801, simple_loss=0.3339, pruned_loss=0.1314, over 1382399.55 frames.], batch size: 23, lr: 2.35e-03 +2022-05-27 14:59:18,663 INFO [train.py:823] (1/4) Epoch 5, batch 800, loss[loss=0.1948, simple_loss=0.3563, pruned_loss=0.1664, over 5086.00 frames.], tot_loss[loss=0.1799, simple_loss=0.3335, pruned_loss=0.131, over 1392287.98 frames.], batch size: 47, lr: 2.35e-03 +2022-05-27 14:59:59,045 INFO [train.py:823] (1/4) Epoch 5, batch 850, loss[loss=0.1553, simple_loss=0.2923, pruned_loss=0.09135, over 7155.00 frames.], tot_loss[loss=0.1789, simple_loss=0.3321, pruned_loss=0.1289, over 1398749.11 frames.], batch size: 17, lr: 2.34e-03 +2022-05-27 15:00:37,900 INFO [train.py:823] (1/4) Epoch 5, batch 900, loss[loss=0.1989, simple_loss=0.368, pruned_loss=0.1495, over 6903.00 frames.], tot_loss[loss=0.1795, simple_loss=0.3331, pruned_loss=0.1292, over 1400731.75 frames.], batch size: 29, lr: 2.34e-03 +2022-05-27 15:01:33,740 INFO [train.py:823] (1/4) Epoch 6, batch 0, loss[loss=0.189, simple_loss=0.351, pruned_loss=0.1347, over 7165.00 frames.], tot_loss[loss=0.189, simple_loss=0.351, pruned_loss=0.1347, over 7165.00 frames.], batch size: 22, lr: 2.24e-03 +2022-05-27 15:02:12,494 INFO [train.py:823] (1/4) Epoch 6, batch 50, loss[loss=0.1747, simple_loss=0.3265, pruned_loss=0.1147, over 7185.00 frames.], tot_loss[loss=0.1766, simple_loss=0.3292, pruned_loss=0.1203, over 318919.82 frames.], batch size: 21, lr: 2.23e-03 +2022-05-27 15:02:52,426 INFO [train.py:823] (1/4) Epoch 6, batch 100, loss[loss=0.1742, simple_loss=0.3247, pruned_loss=0.119, over 7222.00 frames.], tot_loss[loss=0.1718, simple_loss=0.3208, pruned_loss=0.1146, over 565330.45 frames.], batch size: 24, lr: 2.23e-03 +2022-05-27 15:03:32,859 INFO [train.py:823] (1/4) Epoch 6, batch 150, loss[loss=0.1934, simple_loss=0.358, pruned_loss=0.1438, over 7304.00 frames.], tot_loss[loss=0.1727, simple_loss=0.3225, pruned_loss=0.1152, over 754702.04 frames.], batch size: 19, lr: 2.22e-03 +2022-05-27 15:04:12,188 INFO [train.py:823] (1/4) Epoch 6, batch 200, loss[loss=0.1995, simple_loss=0.3694, pruned_loss=0.1486, over 7218.00 frames.], tot_loss[loss=0.1727, simple_loss=0.3224, pruned_loss=0.1155, over 901014.07 frames.], batch size: 25, lr: 2.22e-03 +2022-05-27 15:04:50,827 INFO [train.py:823] (1/4) Epoch 6, batch 250, loss[loss=0.1933, simple_loss=0.3593, pruned_loss=0.1366, over 6537.00 frames.], tot_loss[loss=0.1738, simple_loss=0.3242, pruned_loss=0.1168, over 1017789.25 frames.], batch size: 34, lr: 2.21e-03 +2022-05-27 15:05:29,751 INFO [train.py:823] (1/4) Epoch 6, batch 300, loss[loss=0.1794, simple_loss=0.3362, pruned_loss=0.113, over 7200.00 frames.], tot_loss[loss=0.1735, simple_loss=0.3241, pruned_loss=0.1152, over 1108051.09 frames.], batch size: 20, lr: 2.21e-03 +2022-05-27 15:06:08,819 INFO [train.py:823] (1/4) Epoch 6, batch 350, loss[loss=0.169, simple_loss=0.3172, pruned_loss=0.1042, over 7098.00 frames.], tot_loss[loss=0.1736, simple_loss=0.3242, pruned_loss=0.1153, over 1178763.25 frames.], batch size: 18, lr: 2.20e-03 +2022-05-27 15:06:48,043 INFO [train.py:823] (1/4) Epoch 6, batch 400, loss[loss=0.1806, simple_loss=0.3354, pruned_loss=0.1291, over 7162.00 frames.], tot_loss[loss=0.1726, simple_loss=0.3223, pruned_loss=0.1142, over 1234845.68 frames.], batch size: 22, lr: 2.19e-03 
+2022-05-27 15:07:26,437 INFO [train.py:823] (1/4) Epoch 6, batch 450, loss[loss=0.1658, simple_loss=0.3112, pruned_loss=0.1016, over 6635.00 frames.], tot_loss[loss=0.1722, simple_loss=0.3217, pruned_loss=0.113, over 1267394.92 frames.], batch size: 34, lr: 2.19e-03 +2022-05-27 15:08:05,529 INFO [train.py:823] (1/4) Epoch 6, batch 500, loss[loss=0.1985, simple_loss=0.3632, pruned_loss=0.1689, over 7141.00 frames.], tot_loss[loss=0.1727, simple_loss=0.3227, pruned_loss=0.1132, over 1298316.14 frames.], batch size: 23, lr: 2.18e-03 +2022-05-27 15:08:44,650 INFO [train.py:823] (1/4) Epoch 6, batch 550, loss[loss=0.1652, simple_loss=0.3104, pruned_loss=0.09969, over 7094.00 frames.], tot_loss[loss=0.1724, simple_loss=0.3223, pruned_loss=0.113, over 1325257.90 frames.], batch size: 18, lr: 2.18e-03 +2022-05-27 15:09:24,184 INFO [train.py:823] (1/4) Epoch 6, batch 600, loss[loss=0.1534, simple_loss=0.2901, pruned_loss=0.08319, over 7089.00 frames.], tot_loss[loss=0.1727, simple_loss=0.3226, pruned_loss=0.1135, over 1342109.40 frames.], batch size: 18, lr: 2.17e-03 +2022-05-27 15:10:02,612 INFO [train.py:823] (1/4) Epoch 6, batch 650, loss[loss=0.1609, simple_loss=0.3001, pruned_loss=0.1081, over 7390.00 frames.], tot_loss[loss=0.1717, simple_loss=0.3209, pruned_loss=0.1122, over 1359505.02 frames.], batch size: 19, lr: 2.17e-03 +2022-05-27 15:10:41,870 INFO [train.py:823] (1/4) Epoch 6, batch 700, loss[loss=0.1552, simple_loss=0.2944, pruned_loss=0.0798, over 7200.00 frames.], tot_loss[loss=0.1718, simple_loss=0.3212, pruned_loss=0.1119, over 1375017.51 frames.], batch size: 19, lr: 2.16e-03 +2022-05-27 15:11:20,968 INFO [train.py:823] (1/4) Epoch 6, batch 750, loss[loss=0.1731, simple_loss=0.3239, pruned_loss=0.1113, over 7104.00 frames.], tot_loss[loss=0.1724, simple_loss=0.3223, pruned_loss=0.1125, over 1383298.74 frames.], batch size: 19, lr: 2.16e-03 +2022-05-27 15:12:00,594 INFO [train.py:823] (1/4) Epoch 6, batch 800, loss[loss=0.1524, simple_loss=0.2862, pruned_loss=0.09323, over 7005.00 frames.], tot_loss[loss=0.1718, simple_loss=0.3212, pruned_loss=0.1119, over 1389827.27 frames.], batch size: 16, lr: 2.15e-03 +2022-05-27 15:12:39,768 INFO [train.py:823] (1/4) Epoch 6, batch 850, loss[loss=0.1552, simple_loss=0.2916, pruned_loss=0.09453, over 6843.00 frames.], tot_loss[loss=0.1721, simple_loss=0.3218, pruned_loss=0.1125, over 1394314.10 frames.], batch size: 15, lr: 2.15e-03 +2022-05-27 15:13:19,373 INFO [train.py:823] (1/4) Epoch 6, batch 900, loss[loss=0.1592, simple_loss=0.296, pruned_loss=0.112, over 7273.00 frames.], tot_loss[loss=0.1704, simple_loss=0.3188, pruned_loss=0.1097, over 1397659.99 frames.], batch size: 17, lr: 2.14e-03 +2022-05-27 15:14:12,738 INFO [train.py:823] (1/4) Epoch 7, batch 0, loss[loss=0.1696, simple_loss=0.3186, pruned_loss=0.1027, over 7100.00 frames.], tot_loss[loss=0.1696, simple_loss=0.3186, pruned_loss=0.1027, over 7100.00 frames.], batch size: 19, lr: 2.05e-03 +2022-05-27 15:14:52,606 INFO [train.py:823] (1/4) Epoch 7, batch 50, loss[loss=0.1419, simple_loss=0.2669, pruned_loss=0.08492, over 6807.00 frames.], tot_loss[loss=0.1649, simple_loss=0.3099, pruned_loss=0.09917, over 322424.66 frames.], batch size: 15, lr: 2.04e-03 +2022-05-27 15:15:31,793 INFO [train.py:823] (1/4) Epoch 7, batch 100, loss[loss=0.1714, simple_loss=0.3222, pruned_loss=0.1034, over 7105.00 frames.], tot_loss[loss=0.1645, simple_loss=0.3091, pruned_loss=0.09941, over 561746.28 frames.], batch size: 20, lr: 2.04e-03 +2022-05-27 15:16:10,862 INFO [train.py:823] (1/4) Epoch 7, 
batch 150, loss[loss=0.178, simple_loss=0.3338, pruned_loss=0.1109, over 7382.00 frames.], tot_loss[loss=0.1659, simple_loss=0.3115, pruned_loss=0.1009, over 752360.69 frames.], batch size: 21, lr: 2.03e-03 +2022-05-27 15:16:50,092 INFO [train.py:823] (1/4) Epoch 7, batch 200, loss[loss=0.174, simple_loss=0.3269, pruned_loss=0.105, over 7017.00 frames.], tot_loss[loss=0.1664, simple_loss=0.3123, pruned_loss=0.1018, over 903308.78 frames.], batch size: 26, lr: 2.03e-03 +2022-05-27 15:17:29,143 INFO [train.py:823] (1/4) Epoch 7, batch 250, loss[loss=0.165, simple_loss=0.3134, pruned_loss=0.0833, over 7312.00 frames.], tot_loss[loss=0.1657, simple_loss=0.3114, pruned_loss=0.1005, over 1018543.96 frames.], batch size: 22, lr: 2.02e-03 +2022-05-27 15:18:07,894 INFO [train.py:823] (1/4) Epoch 7, batch 300, loss[loss=0.1487, simple_loss=0.2782, pruned_loss=0.0957, over 7166.00 frames.], tot_loss[loss=0.1656, simple_loss=0.3111, pruned_loss=0.1004, over 1108530.68 frames.], batch size: 17, lr: 2.02e-03 +2022-05-27 15:18:47,542 INFO [train.py:823] (1/4) Epoch 7, batch 350, loss[loss=0.2549, simple_loss=0.3086, pruned_loss=0.1006, over 7290.00 frames.], tot_loss[loss=0.1964, simple_loss=0.3143, pruned_loss=0.1056, over 1175711.06 frames.], batch size: 19, lr: 2.01e-03 +2022-05-27 15:19:26,415 INFO [train.py:823] (1/4) Epoch 7, batch 400, loss[loss=0.26, simple_loss=0.3321, pruned_loss=0.09392, over 7325.00 frames.], tot_loss[loss=0.2167, simple_loss=0.317, pruned_loss=0.1076, over 1230234.90 frames.], batch size: 23, lr: 2.01e-03 +2022-05-27 15:20:05,959 INFO [train.py:823] (1/4) Epoch 7, batch 450, loss[loss=0.2708, simple_loss=0.3261, pruned_loss=0.1078, over 7166.00 frames.], tot_loss[loss=0.2294, simple_loss=0.3187, pruned_loss=0.1073, over 1267365.47 frames.], batch size: 22, lr: 2.00e-03 +2022-05-27 15:20:45,032 INFO [train.py:823] (1/4) Epoch 7, batch 500, loss[loss=0.3233, simple_loss=0.3654, pruned_loss=0.1406, over 7015.00 frames.], tot_loss[loss=0.2376, simple_loss=0.3194, pruned_loss=0.1062, over 1301271.62 frames.], batch size: 26, lr: 2.00e-03 +2022-05-27 15:21:24,292 INFO [train.py:823] (1/4) Epoch 7, batch 550, loss[loss=0.269, simple_loss=0.3198, pruned_loss=0.1091, over 6636.00 frames.], tot_loss[loss=0.2416, simple_loss=0.3181, pruned_loss=0.1042, over 1325389.80 frames.], batch size: 34, lr: 1.99e-03 +2022-05-27 15:22:03,254 INFO [train.py:823] (1/4) Epoch 7, batch 600, loss[loss=0.2727, simple_loss=0.3393, pruned_loss=0.103, over 7380.00 frames.], tot_loss[loss=0.2461, simple_loss=0.3185, pruned_loss=0.1035, over 1343608.29 frames.], batch size: 21, lr: 1.99e-03 +2022-05-27 15:22:42,687 INFO [train.py:823] (1/4) Epoch 7, batch 650, loss[loss=0.2752, simple_loss=0.3278, pruned_loss=0.1113, over 7116.00 frames.], tot_loss[loss=0.2496, simple_loss=0.3183, pruned_loss=0.1032, over 1360885.87 frames.], batch size: 20, lr: 1.98e-03 +2022-05-27 15:23:24,561 INFO [train.py:823] (1/4) Epoch 7, batch 700, loss[loss=0.2055, simple_loss=0.2742, pruned_loss=0.06846, over 7089.00 frames.], tot_loss[loss=0.2524, simple_loss=0.3188, pruned_loss=0.1029, over 1368891.81 frames.], batch size: 18, lr: 1.98e-03 +2022-05-27 15:24:03,770 INFO [train.py:823] (1/4) Epoch 7, batch 750, loss[loss=0.2749, simple_loss=0.335, pruned_loss=0.1074, over 6995.00 frames.], tot_loss[loss=0.252, simple_loss=0.3168, pruned_loss=0.1012, over 1377925.63 frames.], batch size: 26, lr: 1.97e-03 +2022-05-27 15:24:43,857 INFO [train.py:823] (1/4) Epoch 7, batch 800, loss[loss=0.3085, simple_loss=0.3449, pruned_loss=0.136, 
over 7193.00 frames.], tot_loss[loss=0.2542, simple_loss=0.3181, pruned_loss=0.101, over 1387839.17 frames.], batch size: 19, lr: 1.97e-03 +2022-05-27 15:25:23,388 INFO [train.py:823] (1/4) Epoch 7, batch 850, loss[loss=0.2533, simple_loss=0.3156, pruned_loss=0.09548, over 7382.00 frames.], tot_loss[loss=0.2561, simple_loss=0.3192, pruned_loss=0.1011, over 1388669.07 frames.], batch size: 21, lr: 1.97e-03 +2022-05-27 15:26:02,040 INFO [train.py:823] (1/4) Epoch 7, batch 900, loss[loss=0.267, simple_loss=0.33, pruned_loss=0.102, over 6999.00 frames.], tot_loss[loss=0.2562, simple_loss=0.3189, pruned_loss=0.1003, over 1390467.36 frames.], batch size: 29, lr: 1.96e-03 +2022-05-27 15:26:53,928 INFO [train.py:823] (1/4) Epoch 8, batch 0, loss[loss=0.2682, simple_loss=0.3301, pruned_loss=0.1031, over 7426.00 frames.], tot_loss[loss=0.2682, simple_loss=0.3301, pruned_loss=0.1031, over 7426.00 frames.], batch size: 22, lr: 1.88e-03 +2022-05-27 15:27:33,911 INFO [train.py:823] (1/4) Epoch 8, batch 50, loss[loss=0.2293, simple_loss=0.3015, pruned_loss=0.07857, over 7266.00 frames.], tot_loss[loss=0.25, simple_loss=0.3156, pruned_loss=0.09223, over 320515.75 frames.], batch size: 24, lr: 1.87e-03 +2022-05-27 15:28:13,341 INFO [train.py:823] (1/4) Epoch 8, batch 100, loss[loss=0.236, simple_loss=0.2948, pruned_loss=0.08862, over 7022.00 frames.], tot_loss[loss=0.2525, simple_loss=0.3179, pruned_loss=0.09358, over 563865.70 frames.], batch size: 17, lr: 1.87e-03 +2022-05-27 15:28:52,005 INFO [train.py:823] (1/4) Epoch 8, batch 150, loss[loss=0.2483, simple_loss=0.3051, pruned_loss=0.09578, over 7293.00 frames.], tot_loss[loss=0.2515, simple_loss=0.3153, pruned_loss=0.09383, over 752973.36 frames.], batch size: 20, lr: 1.86e-03 +2022-05-27 15:29:31,407 INFO [train.py:823] (1/4) Epoch 8, batch 200, loss[loss=0.2295, simple_loss=0.2777, pruned_loss=0.09066, over 6990.00 frames.], tot_loss[loss=0.2509, simple_loss=0.315, pruned_loss=0.0934, over 899345.75 frames.], batch size: 16, lr: 1.86e-03 +2022-05-27 15:30:10,540 INFO [train.py:823] (1/4) Epoch 8, batch 250, loss[loss=0.2457, simple_loss=0.3305, pruned_loss=0.08045, over 7143.00 frames.], tot_loss[loss=0.2486, simple_loss=0.3136, pruned_loss=0.09183, over 1013356.15 frames.], batch size: 23, lr: 1.85e-03 +2022-05-27 15:30:49,823 INFO [train.py:823] (1/4) Epoch 8, batch 300, loss[loss=0.2744, simple_loss=0.3323, pruned_loss=0.1082, over 7390.00 frames.], tot_loss[loss=0.2493, simple_loss=0.3141, pruned_loss=0.09229, over 1106223.46 frames.], batch size: 19, lr: 1.85e-03 +2022-05-27 15:31:28,734 INFO [train.py:823] (1/4) Epoch 8, batch 350, loss[loss=0.223, simple_loss=0.2828, pruned_loss=0.0816, over 7016.00 frames.], tot_loss[loss=0.2479, simple_loss=0.3124, pruned_loss=0.09167, over 1167186.44 frames.], batch size: 16, lr: 1.85e-03 +2022-05-27 15:32:08,142 INFO [train.py:823] (1/4) Epoch 8, batch 400, loss[loss=0.2711, simple_loss=0.3331, pruned_loss=0.1046, over 7178.00 frames.], tot_loss[loss=0.2483, simple_loss=0.3134, pruned_loss=0.09162, over 1222828.89 frames.], batch size: 22, lr: 1.84e-03 +2022-05-27 15:32:47,367 INFO [train.py:823] (1/4) Epoch 8, batch 450, loss[loss=0.2457, simple_loss=0.316, pruned_loss=0.08767, over 6354.00 frames.], tot_loss[loss=0.2484, simple_loss=0.3139, pruned_loss=0.09146, over 1265014.34 frames.], batch size: 34, lr: 1.84e-03 +2022-05-27 15:33:26,783 INFO [train.py:823] (1/4) Epoch 8, batch 500, loss[loss=0.223, simple_loss=0.281, pruned_loss=0.08249, over 7267.00 frames.], tot_loss[loss=0.2486, simple_loss=0.3137, 
pruned_loss=0.09174, over 1300418.88 frames.], batch size: 17, lr: 1.83e-03 +2022-05-27 15:34:05,407 INFO [train.py:823] (1/4) Epoch 8, batch 550, loss[loss=0.2668, simple_loss=0.3177, pruned_loss=0.108, over 7166.00 frames.], tot_loss[loss=0.2485, simple_loss=0.3142, pruned_loss=0.09135, over 1324421.74 frames.], batch size: 22, lr: 1.83e-03 +2022-05-27 15:34:44,877 INFO [train.py:823] (1/4) Epoch 8, batch 600, loss[loss=0.2399, simple_loss=0.3083, pruned_loss=0.08578, over 7027.00 frames.], tot_loss[loss=0.2488, simple_loss=0.3146, pruned_loss=0.09151, over 1343594.38 frames.], batch size: 17, lr: 1.82e-03 +2022-05-27 15:35:24,013 INFO [train.py:823] (1/4) Epoch 8, batch 650, loss[loss=0.2306, simple_loss=0.3056, pruned_loss=0.07783, over 7004.00 frames.], tot_loss[loss=0.2468, simple_loss=0.3133, pruned_loss=0.0901, over 1361345.01 frames.], batch size: 26, lr: 1.82e-03 +2022-05-27 15:36:03,185 INFO [train.py:823] (1/4) Epoch 8, batch 700, loss[loss=0.2031, simple_loss=0.279, pruned_loss=0.06354, over 7300.00 frames.], tot_loss[loss=0.2467, simple_loss=0.3135, pruned_loss=0.08997, over 1380432.26 frames.], batch size: 19, lr: 1.82e-03 +2022-05-27 15:36:42,098 INFO [train.py:823] (1/4) Epoch 8, batch 750, loss[loss=0.1955, simple_loss=0.2757, pruned_loss=0.05762, over 7088.00 frames.], tot_loss[loss=0.2452, simple_loss=0.3126, pruned_loss=0.08893, over 1387474.95 frames.], batch size: 18, lr: 1.81e-03 +2022-05-27 15:37:21,558 INFO [train.py:823] (1/4) Epoch 8, batch 800, loss[loss=0.2545, simple_loss=0.3183, pruned_loss=0.09533, over 5512.00 frames.], tot_loss[loss=0.2446, simple_loss=0.312, pruned_loss=0.0886, over 1389457.74 frames.], batch size: 47, lr: 1.81e-03 +2022-05-27 15:38:00,592 INFO [train.py:823] (1/4) Epoch 8, batch 850, loss[loss=0.2693, simple_loss=0.3432, pruned_loss=0.09767, over 7187.00 frames.], tot_loss[loss=0.2435, simple_loss=0.3107, pruned_loss=0.0881, over 1391391.02 frames.], batch size: 20, lr: 1.80e-03 +2022-05-27 15:38:39,692 INFO [train.py:823] (1/4) Epoch 8, batch 900, loss[loss=0.2317, simple_loss=0.3029, pruned_loss=0.08025, over 7086.00 frames.], tot_loss[loss=0.2434, simple_loss=0.311, pruned_loss=0.08797, over 1395088.65 frames.], batch size: 18, lr: 1.80e-03 +2022-05-27 15:39:31,035 INFO [train.py:823] (1/4) Epoch 9, batch 0, loss[loss=0.2864, simple_loss=0.3435, pruned_loss=0.1146, over 7189.00 frames.], tot_loss[loss=0.2864, simple_loss=0.3435, pruned_loss=0.1146, over 7189.00 frames.], batch size: 21, lr: 1.72e-03 +2022-05-27 15:40:10,098 INFO [train.py:823] (1/4) Epoch 9, batch 50, loss[loss=0.1974, simple_loss=0.2799, pruned_loss=0.05746, over 7389.00 frames.], tot_loss[loss=0.2348, simple_loss=0.305, pruned_loss=0.08234, over 319310.05 frames.], batch size: 19, lr: 1.72e-03 +2022-05-27 15:40:49,185 INFO [train.py:823] (1/4) Epoch 9, batch 100, loss[loss=0.2141, simple_loss=0.2854, pruned_loss=0.07141, over 7298.00 frames.], tot_loss[loss=0.2346, simple_loss=0.3055, pruned_loss=0.08188, over 563251.78 frames.], batch size: 19, lr: 1.71e-03 +2022-05-27 15:41:28,162 INFO [train.py:823] (1/4) Epoch 9, batch 150, loss[loss=0.2297, simple_loss=0.3024, pruned_loss=0.07851, over 7098.00 frames.], tot_loss[loss=0.2347, simple_loss=0.3056, pruned_loss=0.08193, over 753204.55 frames.], batch size: 19, lr: 1.71e-03 +2022-05-27 15:42:06,936 INFO [train.py:823] (1/4) Epoch 9, batch 200, loss[loss=0.2346, simple_loss=0.3116, pruned_loss=0.07883, over 7278.00 frames.], tot_loss[loss=0.2343, simple_loss=0.3057, pruned_loss=0.08141, over 895378.76 frames.], batch 
size: 20, lr: 1.71e-03 +2022-05-27 15:42:46,303 INFO [train.py:823] (1/4) Epoch 9, batch 250, loss[loss=0.2371, simple_loss=0.3075, pruned_loss=0.08329, over 7194.00 frames.], tot_loss[loss=0.2346, simple_loss=0.3057, pruned_loss=0.08171, over 1011775.04 frames.], batch size: 20, lr: 1.70e-03 +2022-05-27 15:43:24,877 INFO [train.py:823] (1/4) Epoch 9, batch 300, loss[loss=0.2204, simple_loss=0.2948, pruned_loss=0.07303, over 7194.00 frames.], tot_loss[loss=0.2338, simple_loss=0.3055, pruned_loss=0.08106, over 1103202.08 frames.], batch size: 18, lr: 1.70e-03 +2022-05-27 15:44:04,282 INFO [train.py:823] (1/4) Epoch 9, batch 350, loss[loss=0.2222, simple_loss=0.285, pruned_loss=0.07966, over 7289.00 frames.], tot_loss[loss=0.2311, simple_loss=0.3031, pruned_loss=0.07952, over 1173135.09 frames.], batch size: 17, lr: 1.70e-03 +2022-05-27 15:44:43,677 INFO [train.py:823] (1/4) Epoch 9, batch 400, loss[loss=0.2224, simple_loss=0.3069, pruned_loss=0.06892, over 7301.00 frames.], tot_loss[loss=0.2312, simple_loss=0.303, pruned_loss=0.07968, over 1230534.09 frames.], batch size: 22, lr: 1.69e-03 +2022-05-27 15:45:28,892 INFO [train.py:823] (1/4) Epoch 9, batch 450, loss[loss=0.2088, simple_loss=0.2933, pruned_loss=0.06212, over 7195.00 frames.], tot_loss[loss=0.2332, simple_loss=0.3051, pruned_loss=0.08069, over 1271631.12 frames.], batch size: 19, lr: 1.69e-03 +2022-05-27 15:46:08,965 INFO [train.py:823] (1/4) Epoch 9, batch 500, loss[loss=0.2266, simple_loss=0.3081, pruned_loss=0.07251, over 7248.00 frames.], tot_loss[loss=0.2352, simple_loss=0.3066, pruned_loss=0.08186, over 1305081.57 frames.], batch size: 24, lr: 1.68e-03 +2022-05-27 15:46:48,232 INFO [train.py:823] (1/4) Epoch 9, batch 550, loss[loss=0.2454, simple_loss=0.3014, pruned_loss=0.09464, over 7206.00 frames.], tot_loss[loss=0.2349, simple_loss=0.3065, pruned_loss=0.08159, over 1334414.37 frames.], batch size: 19, lr: 1.68e-03 +2022-05-27 15:47:28,226 INFO [train.py:823] (1/4) Epoch 9, batch 600, loss[loss=0.211, simple_loss=0.2774, pruned_loss=0.07231, over 7148.00 frames.], tot_loss[loss=0.2326, simple_loss=0.3043, pruned_loss=0.08039, over 1353461.04 frames.], batch size: 17, lr: 1.68e-03 +2022-05-27 15:48:07,778 INFO [train.py:823] (1/4) Epoch 9, batch 650, loss[loss=0.2379, simple_loss=0.3125, pruned_loss=0.08163, over 7031.00 frames.], tot_loss[loss=0.232, simple_loss=0.3038, pruned_loss=0.08007, over 1366525.95 frames.], batch size: 29, lr: 1.67e-03 +2022-05-27 15:48:46,355 INFO [train.py:823] (1/4) Epoch 9, batch 700, loss[loss=0.2042, simple_loss=0.2924, pruned_loss=0.05797, over 7298.00 frames.], tot_loss[loss=0.2325, simple_loss=0.3047, pruned_loss=0.08013, over 1375208.78 frames.], batch size: 22, lr: 1.67e-03 +2022-05-27 15:49:25,433 INFO [train.py:823] (1/4) Epoch 9, batch 750, loss[loss=0.2061, simple_loss=0.2735, pruned_loss=0.06938, over 7188.00 frames.], tot_loss[loss=0.2341, simple_loss=0.3061, pruned_loss=0.0811, over 1386027.30 frames.], batch size: 18, lr: 1.67e-03 +2022-05-27 15:50:03,859 INFO [train.py:823] (1/4) Epoch 9, batch 800, loss[loss=0.2409, simple_loss=0.3015, pruned_loss=0.09014, over 7098.00 frames.], tot_loss[loss=0.2338, simple_loss=0.306, pruned_loss=0.08083, over 1387040.86 frames.], batch size: 19, lr: 1.66e-03 +2022-05-27 15:50:43,404 INFO [train.py:823] (1/4) Epoch 9, batch 850, loss[loss=0.2125, simple_loss=0.2772, pruned_loss=0.07391, over 6830.00 frames.], tot_loss[loss=0.2336, simple_loss=0.306, pruned_loss=0.08057, over 1396740.91 frames.], batch size: 15, lr: 1.66e-03 +2022-05-27 
15:51:23,336 INFO [train.py:823] (1/4) Epoch 9, batch 900, loss[loss=0.2474, simple_loss=0.3136, pruned_loss=0.09059, over 7246.00 frames.], tot_loss[loss=0.235, simple_loss=0.3071, pruned_loss=0.0815, over 1399729.59 frames.], batch size: 16, lr: 1.65e-03 +2022-05-27 15:52:14,252 INFO [train.py:823] (1/4) Epoch 10, batch 0, loss[loss=0.233, simple_loss=0.3081, pruned_loss=0.07891, over 7111.00 frames.], tot_loss[loss=0.233, simple_loss=0.3081, pruned_loss=0.07891, over 7111.00 frames.], batch size: 20, lr: 1.59e-03 +2022-05-27 15:52:52,895 INFO [train.py:823] (1/4) Epoch 10, batch 50, loss[loss=0.2006, simple_loss=0.2774, pruned_loss=0.06191, over 7036.00 frames.], tot_loss[loss=0.2245, simple_loss=0.2989, pruned_loss=0.07502, over 319226.37 frames.], batch size: 17, lr: 1.58e-03 +2022-05-27 15:53:32,109 INFO [train.py:823] (1/4) Epoch 10, batch 100, loss[loss=0.211, simple_loss=0.2889, pruned_loss=0.06653, over 7372.00 frames.], tot_loss[loss=0.2216, simple_loss=0.2968, pruned_loss=0.07323, over 560865.30 frames.], batch size: 20, lr: 1.58e-03 +2022-05-27 15:54:10,795 INFO [train.py:823] (1/4) Epoch 10, batch 150, loss[loss=0.2312, simple_loss=0.3054, pruned_loss=0.07851, over 7276.00 frames.], tot_loss[loss=0.2248, simple_loss=0.3, pruned_loss=0.07481, over 750745.90 frames.], batch size: 20, lr: 1.58e-03 +2022-05-27 15:54:50,486 INFO [train.py:823] (1/4) Epoch 10, batch 200, loss[loss=0.2174, simple_loss=0.2932, pruned_loss=0.07086, over 7281.00 frames.], tot_loss[loss=0.2222, simple_loss=0.2979, pruned_loss=0.07329, over 901995.75 frames.], batch size: 21, lr: 1.57e-03 +2022-05-27 15:55:29,310 INFO [train.py:823] (1/4) Epoch 10, batch 250, loss[loss=0.2161, simple_loss=0.2927, pruned_loss=0.06974, over 7378.00 frames.], tot_loss[loss=0.2219, simple_loss=0.298, pruned_loss=0.07292, over 1017827.86 frames.], batch size: 21, lr: 1.57e-03 +2022-05-27 15:56:08,352 INFO [train.py:823] (1/4) Epoch 10, batch 300, loss[loss=0.2193, simple_loss=0.2984, pruned_loss=0.07009, over 6984.00 frames.], tot_loss[loss=0.2245, simple_loss=0.2998, pruned_loss=0.07463, over 1109226.72 frames.], batch size: 26, lr: 1.57e-03 +2022-05-27 15:56:47,621 INFO [train.py:823] (1/4) Epoch 10, batch 350, loss[loss=0.2352, simple_loss=0.2878, pruned_loss=0.09128, over 7280.00 frames.], tot_loss[loss=0.225, simple_loss=0.3002, pruned_loss=0.07494, over 1175939.41 frames.], batch size: 16, lr: 1.56e-03 +2022-05-27 15:57:26,944 INFO [train.py:823] (1/4) Epoch 10, batch 400, loss[loss=0.2259, simple_loss=0.3062, pruned_loss=0.07279, over 7092.00 frames.], tot_loss[loss=0.2249, simple_loss=0.3006, pruned_loss=0.07462, over 1225930.46 frames.], batch size: 19, lr: 1.56e-03 +2022-05-27 15:58:05,990 INFO [train.py:823] (1/4) Epoch 10, batch 450, loss[loss=0.2277, simple_loss=0.3003, pruned_loss=0.07755, over 7276.00 frames.], tot_loss[loss=0.2257, simple_loss=0.3008, pruned_loss=0.07532, over 1266947.05 frames.], batch size: 20, lr: 1.56e-03 +2022-05-27 15:58:45,239 INFO [train.py:823] (1/4) Epoch 10, batch 500, loss[loss=0.2135, simple_loss=0.2962, pruned_loss=0.06544, over 7273.00 frames.], tot_loss[loss=0.2247, simple_loss=0.3001, pruned_loss=0.0747, over 1299286.75 frames.], batch size: 20, lr: 1.55e-03 +2022-05-27 15:59:23,989 INFO [train.py:823] (1/4) Epoch 10, batch 550, loss[loss=0.1861, simple_loss=0.2706, pruned_loss=0.05082, over 7099.00 frames.], tot_loss[loss=0.2233, simple_loss=0.2988, pruned_loss=0.0739, over 1329015.45 frames.], batch size: 18, lr: 1.55e-03 +2022-05-27 16:00:03,421 INFO [train.py:823] (1/4) 
Epoch 10, batch 600, loss[loss=0.177, simple_loss=0.2533, pruned_loss=0.05029, over 7278.00 frames.], tot_loss[loss=0.226, simple_loss=0.3008, pruned_loss=0.07561, over 1353827.34 frames.], batch size: 19, lr: 1.55e-03 +2022-05-27 16:00:42,544 INFO [train.py:823] (1/4) Epoch 10, batch 650, loss[loss=0.2477, simple_loss=0.3238, pruned_loss=0.08585, over 7199.00 frames.], tot_loss[loss=0.2248, simple_loss=0.2994, pruned_loss=0.07507, over 1371789.76 frames.], batch size: 21, lr: 1.54e-03 +2022-05-27 16:01:22,294 INFO [train.py:823] (1/4) Epoch 10, batch 700, loss[loss=0.2659, simple_loss=0.3036, pruned_loss=0.114, over 7024.00 frames.], tot_loss[loss=0.2254, simple_loss=0.3003, pruned_loss=0.07518, over 1385287.26 frames.], batch size: 16, lr: 1.54e-03 +2022-05-27 16:02:01,164 INFO [train.py:823] (1/4) Epoch 10, batch 750, loss[loss=0.231, simple_loss=0.2975, pruned_loss=0.08229, over 7186.00 frames.], tot_loss[loss=0.2242, simple_loss=0.2995, pruned_loss=0.07443, over 1392438.96 frames.], batch size: 18, lr: 1.54e-03 +2022-05-27 16:02:40,261 INFO [train.py:823] (1/4) Epoch 10, batch 800, loss[loss=0.2478, simple_loss=0.3226, pruned_loss=0.08651, over 7193.00 frames.], tot_loss[loss=0.2242, simple_loss=0.2996, pruned_loss=0.07434, over 1399590.84 frames.], batch size: 25, lr: 1.53e-03 +2022-05-27 16:03:19,603 INFO [train.py:823] (1/4) Epoch 10, batch 850, loss[loss=0.1991, simple_loss=0.2813, pruned_loss=0.05852, over 7152.00 frames.], tot_loss[loss=0.2236, simple_loss=0.2991, pruned_loss=0.07401, over 1406063.01 frames.], batch size: 22, lr: 1.53e-03 +2022-05-27 16:03:59,157 INFO [train.py:823] (1/4) Epoch 10, batch 900, loss[loss=0.2329, simple_loss=0.3018, pruned_loss=0.08203, over 7233.00 frames.], tot_loss[loss=0.2229, simple_loss=0.2983, pruned_loss=0.07374, over 1406186.60 frames.], batch size: 16, lr: 1.53e-03 +2022-05-27 16:04:53,495 INFO [train.py:823] (1/4) Epoch 11, batch 0, loss[loss=0.2107, simple_loss=0.2894, pruned_loss=0.06601, over 7096.00 frames.], tot_loss[loss=0.2107, simple_loss=0.2894, pruned_loss=0.06601, over 7096.00 frames.], batch size: 19, lr: 1.47e-03 +2022-05-27 16:05:32,581 INFO [train.py:823] (1/4) Epoch 11, batch 50, loss[loss=0.2084, simple_loss=0.2989, pruned_loss=0.05897, over 6492.00 frames.], tot_loss[loss=0.2205, simple_loss=0.2977, pruned_loss=0.07167, over 323174.39 frames.], batch size: 34, lr: 1.47e-03 +2022-05-27 16:06:11,618 INFO [train.py:823] (1/4) Epoch 11, batch 100, loss[loss=0.1721, simple_loss=0.2528, pruned_loss=0.04569, over 7142.00 frames.], tot_loss[loss=0.2191, simple_loss=0.2955, pruned_loss=0.07136, over 569152.74 frames.], batch size: 17, lr: 1.46e-03 +2022-05-27 16:06:50,763 INFO [train.py:823] (1/4) Epoch 11, batch 150, loss[loss=0.2402, simple_loss=0.3166, pruned_loss=0.08185, over 7223.00 frames.], tot_loss[loss=0.2175, simple_loss=0.2944, pruned_loss=0.07024, over 760596.92 frames.], batch size: 24, lr: 1.46e-03 +2022-05-27 16:07:29,180 INFO [train.py:823] (1/4) Epoch 11, batch 200, loss[loss=0.1997, simple_loss=0.2827, pruned_loss=0.0584, over 7106.00 frames.], tot_loss[loss=0.2185, simple_loss=0.2959, pruned_loss=0.07051, over 901436.39 frames.], batch size: 19, lr: 1.46e-03 +2022-05-27 16:08:08,809 INFO [train.py:823] (1/4) Epoch 11, batch 250, loss[loss=0.2376, simple_loss=0.3063, pruned_loss=0.08451, over 7093.00 frames.], tot_loss[loss=0.2197, simple_loss=0.2973, pruned_loss=0.07109, over 1014801.96 frames.], batch size: 18, lr: 1.45e-03 +2022-05-27 16:08:50,571 INFO [train.py:823] (1/4) Epoch 11, batch 300, 
loss[loss=0.204, simple_loss=0.2866, pruned_loss=0.06066, over 7213.00 frames.], tot_loss[loss=0.2193, simple_loss=0.2963, pruned_loss=0.0711, over 1105633.22 frames.], batch size: 25, lr: 1.45e-03 +2022-05-27 16:09:29,784 INFO [train.py:823] (1/4) Epoch 11, batch 350, loss[loss=0.2113, simple_loss=0.3032, pruned_loss=0.05966, over 7197.00 frames.], tot_loss[loss=0.2186, simple_loss=0.2956, pruned_loss=0.07078, over 1176997.60 frames.], batch size: 25, lr: 1.45e-03 +2022-05-27 16:10:08,817 INFO [train.py:823] (1/4) Epoch 11, batch 400, loss[loss=0.2037, simple_loss=0.3006, pruned_loss=0.05342, over 7094.00 frames.], tot_loss[loss=0.2178, simple_loss=0.295, pruned_loss=0.07029, over 1231342.69 frames.], batch size: 19, lr: 1.44e-03 +2022-05-27 16:10:47,981 INFO [train.py:823] (1/4) Epoch 11, batch 450, loss[loss=0.2095, simple_loss=0.2939, pruned_loss=0.0626, over 7297.00 frames.], tot_loss[loss=0.2172, simple_loss=0.2949, pruned_loss=0.06976, over 1270050.26 frames.], batch size: 22, lr: 1.44e-03 +2022-05-27 16:11:28,137 INFO [train.py:823] (1/4) Epoch 11, batch 500, loss[loss=0.1934, simple_loss=0.2817, pruned_loss=0.05259, over 6395.00 frames.], tot_loss[loss=0.217, simple_loss=0.2949, pruned_loss=0.06954, over 1303304.16 frames.], batch size: 34, lr: 1.44e-03 +2022-05-27 16:12:07,641 INFO [train.py:823] (1/4) Epoch 11, batch 550, loss[loss=0.2205, simple_loss=0.2915, pruned_loss=0.07471, over 7409.00 frames.], tot_loss[loss=0.2165, simple_loss=0.2948, pruned_loss=0.06909, over 1331940.68 frames.], batch size: 22, lr: 1.44e-03 +2022-05-27 16:12:46,622 INFO [train.py:823] (1/4) Epoch 11, batch 600, loss[loss=0.1852, simple_loss=0.2722, pruned_loss=0.04906, over 7381.00 frames.], tot_loss[loss=0.2168, simple_loss=0.2948, pruned_loss=0.06936, over 1350839.67 frames.], batch size: 19, lr: 1.43e-03 +2022-05-27 16:13:26,113 INFO [train.py:823] (1/4) Epoch 11, batch 650, loss[loss=0.2007, simple_loss=0.278, pruned_loss=0.06172, over 7308.00 frames.], tot_loss[loss=0.2172, simple_loss=0.2951, pruned_loss=0.06967, over 1367855.61 frames.], batch size: 18, lr: 1.43e-03 +2022-05-27 16:14:04,486 INFO [train.py:823] (1/4) Epoch 11, batch 700, loss[loss=0.3113, simple_loss=0.355, pruned_loss=0.1338, over 7164.00 frames.], tot_loss[loss=0.2173, simple_loss=0.2952, pruned_loss=0.0697, over 1382336.10 frames.], batch size: 17, lr: 1.43e-03 +2022-05-27 16:14:45,010 INFO [train.py:823] (1/4) Epoch 11, batch 750, loss[loss=0.1532, simple_loss=0.2344, pruned_loss=0.03602, over 7290.00 frames.], tot_loss[loss=0.2164, simple_loss=0.2943, pruned_loss=0.06924, over 1389992.95 frames.], batch size: 17, lr: 1.42e-03 +2022-05-27 16:15:23,551 INFO [train.py:823] (1/4) Epoch 11, batch 800, loss[loss=0.2042, simple_loss=0.2851, pruned_loss=0.06163, over 7194.00 frames.], tot_loss[loss=0.2161, simple_loss=0.2944, pruned_loss=0.06892, over 1395338.88 frames.], batch size: 19, lr: 1.42e-03 +2022-05-27 16:16:03,303 INFO [train.py:823] (1/4) Epoch 11, batch 850, loss[loss=0.2513, simple_loss=0.3405, pruned_loss=0.08102, over 7109.00 frames.], tot_loss[loss=0.2167, simple_loss=0.2948, pruned_loss=0.06929, over 1397754.24 frames.], batch size: 20, lr: 1.42e-03 +2022-05-27 16:16:42,233 INFO [train.py:823] (1/4) Epoch 11, batch 900, loss[loss=0.2075, simple_loss=0.2719, pruned_loss=0.07153, over 6802.00 frames.], tot_loss[loss=0.2174, simple_loss=0.2959, pruned_loss=0.06949, over 1397530.65 frames.], batch size: 15, lr: 1.42e-03 +2022-05-27 16:17:32,980 INFO [train.py:823] (1/4) Epoch 12, batch 0, loss[loss=0.2001, 
simple_loss=0.2748, pruned_loss=0.06269, over 7304.00 frames.], tot_loss[loss=0.2001, simple_loss=0.2748, pruned_loss=0.06269, over 7304.00 frames.], batch size: 17, lr: 1.36e-03 +2022-05-27 16:18:12,407 INFO [train.py:823] (1/4) Epoch 12, batch 50, loss[loss=0.2202, simple_loss=0.3078, pruned_loss=0.06632, over 7243.00 frames.], tot_loss[loss=0.2119, simple_loss=0.2894, pruned_loss=0.06722, over 316972.22 frames.], batch size: 24, lr: 1.36e-03 +2022-05-27 16:18:51,681 INFO [train.py:823] (1/4) Epoch 12, batch 100, loss[loss=0.2628, simple_loss=0.3361, pruned_loss=0.09475, over 7147.00 frames.], tot_loss[loss=0.2114, simple_loss=0.2902, pruned_loss=0.06626, over 561006.18 frames.], batch size: 23, lr: 1.36e-03 +2022-05-27 16:19:30,766 INFO [train.py:823] (1/4) Epoch 12, batch 150, loss[loss=0.1983, simple_loss=0.2785, pruned_loss=0.05908, over 7288.00 frames.], tot_loss[loss=0.2095, simple_loss=0.2897, pruned_loss=0.0647, over 752301.74 frames.], batch size: 20, lr: 1.36e-03 +2022-05-27 16:20:10,254 INFO [train.py:823] (1/4) Epoch 12, batch 200, loss[loss=0.1956, simple_loss=0.264, pruned_loss=0.06365, over 6827.00 frames.], tot_loss[loss=0.2104, simple_loss=0.2906, pruned_loss=0.06509, over 899136.27 frames.], batch size: 15, lr: 1.35e-03 +2022-05-27 16:20:49,257 INFO [train.py:823] (1/4) Epoch 12, batch 250, loss[loss=0.227, simple_loss=0.3036, pruned_loss=0.07518, over 7029.00 frames.], tot_loss[loss=0.2118, simple_loss=0.2915, pruned_loss=0.06607, over 1016485.73 frames.], batch size: 26, lr: 1.35e-03 +2022-05-27 16:21:28,526 INFO [train.py:823] (1/4) Epoch 12, batch 300, loss[loss=0.2059, simple_loss=0.2812, pruned_loss=0.06529, over 7202.00 frames.], tot_loss[loss=0.2116, simple_loss=0.2912, pruned_loss=0.06594, over 1102976.48 frames.], batch size: 19, lr: 1.35e-03 +2022-05-27 16:22:07,688 INFO [train.py:823] (1/4) Epoch 12, batch 350, loss[loss=0.233, simple_loss=0.3067, pruned_loss=0.07963, over 7342.00 frames.], tot_loss[loss=0.2111, simple_loss=0.2907, pruned_loss=0.06578, over 1176756.92 frames.], batch size: 23, lr: 1.35e-03 +2022-05-27 16:22:46,793 INFO [train.py:823] (1/4) Epoch 12, batch 400, loss[loss=0.2175, simple_loss=0.2991, pruned_loss=0.06793, over 6966.00 frames.], tot_loss[loss=0.2111, simple_loss=0.291, pruned_loss=0.06562, over 1231223.11 frames.], batch size: 29, lr: 1.34e-03 +2022-05-27 16:23:26,137 INFO [train.py:823] (1/4) Epoch 12, batch 450, loss[loss=0.2192, simple_loss=0.2954, pruned_loss=0.07149, over 7378.00 frames.], tot_loss[loss=0.2105, simple_loss=0.2909, pruned_loss=0.06511, over 1273895.98 frames.], batch size: 20, lr: 1.34e-03 +2022-05-27 16:24:05,954 INFO [train.py:823] (1/4) Epoch 12, batch 500, loss[loss=0.2085, simple_loss=0.2958, pruned_loss=0.06059, over 7280.00 frames.], tot_loss[loss=0.2094, simple_loss=0.2898, pruned_loss=0.06455, over 1311806.29 frames.], batch size: 20, lr: 1.34e-03 +2022-05-27 16:24:44,890 INFO [train.py:823] (1/4) Epoch 12, batch 550, loss[loss=0.1875, simple_loss=0.2659, pruned_loss=0.0546, over 7428.00 frames.], tot_loss[loss=0.2106, simple_loss=0.2911, pruned_loss=0.06503, over 1339766.31 frames.], batch size: 18, lr: 1.34e-03 +2022-05-27 16:25:24,208 INFO [train.py:823] (1/4) Epoch 12, batch 600, loss[loss=0.1873, simple_loss=0.2602, pruned_loss=0.0572, over 7211.00 frames.], tot_loss[loss=0.2095, simple_loss=0.29, pruned_loss=0.06447, over 1360176.91 frames.], batch size: 16, lr: 1.33e-03 +2022-05-27 16:26:03,071 INFO [train.py:823] (1/4) Epoch 12, batch 650, loss[loss=0.1939, simple_loss=0.286, 
pruned_loss=0.05089, over 7279.00 frames.], tot_loss[loss=0.2098, simple_loss=0.2902, pruned_loss=0.06468, over 1371038.42 frames.], batch size: 21, lr: 1.33e-03 +2022-05-27 16:26:41,887 INFO [train.py:823] (1/4) Epoch 12, batch 700, loss[loss=0.209, simple_loss=0.2877, pruned_loss=0.06513, over 7283.00 frames.], tot_loss[loss=0.2107, simple_loss=0.2912, pruned_loss=0.06509, over 1382557.38 frames.], batch size: 20, lr: 1.33e-03 +2022-05-27 16:27:21,127 INFO [train.py:823] (1/4) Epoch 12, batch 750, loss[loss=0.2465, simple_loss=0.326, pruned_loss=0.08352, over 7304.00 frames.], tot_loss[loss=0.2115, simple_loss=0.292, pruned_loss=0.06547, over 1388743.98 frames.], batch size: 22, lr: 1.33e-03 +2022-05-27 16:28:00,197 INFO [train.py:823] (1/4) Epoch 12, batch 800, loss[loss=0.2039, simple_loss=0.2857, pruned_loss=0.06099, over 7296.00 frames.], tot_loss[loss=0.2117, simple_loss=0.292, pruned_loss=0.06573, over 1395593.63 frames.], batch size: 22, lr: 1.32e-03 +2022-05-27 16:28:38,843 INFO [train.py:823] (1/4) Epoch 12, batch 850, loss[loss=0.2006, simple_loss=0.2833, pruned_loss=0.05894, over 7187.00 frames.], tot_loss[loss=0.2111, simple_loss=0.2915, pruned_loss=0.06529, over 1401162.62 frames.], batch size: 18, lr: 1.32e-03 +2022-05-27 16:29:17,812 INFO [train.py:823] (1/4) Epoch 12, batch 900, loss[loss=0.2103, simple_loss=0.2921, pruned_loss=0.06425, over 7098.00 frames.], tot_loss[loss=0.2116, simple_loss=0.2916, pruned_loss=0.06577, over 1396543.75 frames.], batch size: 19, lr: 1.32e-03 +2022-05-27 16:30:08,401 INFO [train.py:823] (1/4) Epoch 13, batch 0, loss[loss=0.1999, simple_loss=0.2921, pruned_loss=0.05389, over 7170.00 frames.], tot_loss[loss=0.1999, simple_loss=0.2921, pruned_loss=0.05389, over 7170.00 frames.], batch size: 22, lr: 1.27e-03 +2022-05-27 16:30:48,273 INFO [train.py:823] (1/4) Epoch 13, batch 50, loss[loss=0.1839, simple_loss=0.2662, pruned_loss=0.05083, over 7296.00 frames.], tot_loss[loss=0.208, simple_loss=0.2876, pruned_loss=0.06416, over 317868.64 frames.], batch size: 19, lr: 1.27e-03 +2022-05-27 16:31:28,615 INFO [train.py:823] (1/4) Epoch 13, batch 100, loss[loss=0.1991, simple_loss=0.2737, pruned_loss=0.06229, over 7329.00 frames.], tot_loss[loss=0.2097, simple_loss=0.2891, pruned_loss=0.06516, over 562640.07 frames.], batch size: 18, lr: 1.27e-03 +2022-05-27 16:32:11,889 INFO [train.py:823] (1/4) Epoch 13, batch 150, loss[loss=0.2042, simple_loss=0.2764, pruned_loss=0.06601, over 7386.00 frames.], tot_loss[loss=0.2087, simple_loss=0.2884, pruned_loss=0.06447, over 752724.13 frames.], batch size: 19, lr: 1.26e-03 +2022-05-27 16:32:57,570 INFO [train.py:823] (1/4) Epoch 13, batch 200, loss[loss=0.1766, simple_loss=0.2534, pruned_loss=0.04996, over 7015.00 frames.], tot_loss[loss=0.2073, simple_loss=0.2876, pruned_loss=0.0635, over 903040.21 frames.], batch size: 17, lr: 1.26e-03 +2022-05-27 16:33:37,082 INFO [train.py:823] (1/4) Epoch 13, batch 250, loss[loss=0.2394, simple_loss=0.3157, pruned_loss=0.08154, over 7149.00 frames.], tot_loss[loss=0.2086, simple_loss=0.2884, pruned_loss=0.06434, over 1016555.54 frames.], batch size: 22, lr: 1.26e-03 +2022-05-27 16:34:18,167 INFO [train.py:823] (1/4) Epoch 13, batch 300, loss[loss=0.1565, simple_loss=0.24, pruned_loss=0.0365, over 7292.00 frames.], tot_loss[loss=0.2061, simple_loss=0.2869, pruned_loss=0.06266, over 1109464.86 frames.], batch size: 17, lr: 1.26e-03 +2022-05-27 16:34:57,069 INFO [train.py:823] (1/4) Epoch 13, batch 350, loss[loss=0.1867, simple_loss=0.2732, pruned_loss=0.05006, over 6537.00 
frames.], tot_loss[loss=0.2064, simple_loss=0.2873, pruned_loss=0.06273, over 1176687.28 frames.], batch size: 34, lr: 1.26e-03 +2022-05-27 16:35:36,339 INFO [train.py:823] (1/4) Epoch 13, batch 400, loss[loss=0.2116, simple_loss=0.2994, pruned_loss=0.06188, over 7055.00 frames.], tot_loss[loss=0.2063, simple_loss=0.2874, pruned_loss=0.06259, over 1230350.63 frames.], batch size: 26, lr: 1.25e-03 +2022-05-27 16:36:15,506 INFO [train.py:823] (1/4) Epoch 13, batch 450, loss[loss=0.2116, simple_loss=0.3003, pruned_loss=0.06148, over 6981.00 frames.], tot_loss[loss=0.2064, simple_loss=0.2874, pruned_loss=0.06274, over 1267342.17 frames.], batch size: 29, lr: 1.25e-03 +2022-05-27 16:36:55,099 INFO [train.py:823] (1/4) Epoch 13, batch 500, loss[loss=0.2399, simple_loss=0.3245, pruned_loss=0.07766, over 6881.00 frames.], tot_loss[loss=0.2052, simple_loss=0.286, pruned_loss=0.06219, over 1300354.43 frames.], batch size: 29, lr: 1.25e-03 +2022-05-27 16:37:34,234 INFO [train.py:823] (1/4) Epoch 13, batch 550, loss[loss=0.2126, simple_loss=0.2884, pruned_loss=0.06837, over 7288.00 frames.], tot_loss[loss=0.2056, simple_loss=0.2858, pruned_loss=0.06267, over 1321505.13 frames.], batch size: 19, lr: 1.25e-03 +2022-05-27 16:38:13,511 INFO [train.py:823] (1/4) Epoch 13, batch 600, loss[loss=0.1707, simple_loss=0.2622, pruned_loss=0.0396, over 7272.00 frames.], tot_loss[loss=0.2064, simple_loss=0.2871, pruned_loss=0.06286, over 1343695.48 frames.], batch size: 20, lr: 1.24e-03 +2022-05-27 16:38:53,766 INFO [train.py:823] (1/4) Epoch 13, batch 650, loss[loss=0.2078, simple_loss=0.3006, pruned_loss=0.05756, over 7195.00 frames.], tot_loss[loss=0.2063, simple_loss=0.2876, pruned_loss=0.06254, over 1359935.65 frames.], batch size: 19, lr: 1.24e-03 +2022-05-27 16:39:33,146 INFO [train.py:823] (1/4) Epoch 13, batch 700, loss[loss=0.1867, simple_loss=0.2608, pruned_loss=0.05631, over 7036.00 frames.], tot_loss[loss=0.2068, simple_loss=0.2881, pruned_loss=0.06273, over 1371171.67 frames.], batch size: 17, lr: 1.24e-03 +2022-05-27 16:40:12,571 INFO [train.py:823] (1/4) Epoch 13, batch 750, loss[loss=0.2312, simple_loss=0.3114, pruned_loss=0.07546, over 6928.00 frames.], tot_loss[loss=0.2067, simple_loss=0.2884, pruned_loss=0.06248, over 1379078.48 frames.], batch size: 29, lr: 1.24e-03 +2022-05-27 16:40:51,205 INFO [train.py:823] (1/4) Epoch 13, batch 800, loss[loss=0.242, simple_loss=0.3235, pruned_loss=0.08029, over 7155.00 frames.], tot_loss[loss=0.2061, simple_loss=0.2881, pruned_loss=0.06206, over 1385656.16 frames.], batch size: 23, lr: 1.24e-03 +2022-05-27 16:41:30,431 INFO [train.py:823] (1/4) Epoch 13, batch 850, loss[loss=0.2048, simple_loss=0.2876, pruned_loss=0.06105, over 7283.00 frames.], tot_loss[loss=0.2061, simple_loss=0.2879, pruned_loss=0.06213, over 1396223.96 frames.], batch size: 20, lr: 1.23e-03 +2022-05-27 16:42:09,480 INFO [train.py:823] (1/4) Epoch 13, batch 900, loss[loss=0.2086, simple_loss=0.2763, pruned_loss=0.07048, over 7275.00 frames.], tot_loss[loss=0.2061, simple_loss=0.2879, pruned_loss=0.06216, over 1395856.24 frames.], batch size: 19, lr: 1.23e-03 +2022-05-27 16:42:48,622 INFO [train.py:823] (1/4) Epoch 13, batch 950, loss[loss=0.1907, simple_loss=0.2649, pruned_loss=0.05819, over 7022.00 frames.], tot_loss[loss=0.2056, simple_loss=0.2874, pruned_loss=0.06194, over 1394385.02 frames.], batch size: 16, lr: 1.23e-03 +2022-05-27 16:43:01,800 INFO [train.py:823] (1/4) Epoch 14, batch 0, loss[loss=0.2131, simple_loss=0.3039, pruned_loss=0.06116, over 7292.00 frames.], 
tot_loss[loss=0.2131, simple_loss=0.3039, pruned_loss=0.06116, over 7292.00 frames.], batch size: 22, lr: 1.19e-03 +2022-05-27 16:43:41,514 INFO [train.py:823] (1/4) Epoch 14, batch 50, loss[loss=0.2507, simple_loss=0.3266, pruned_loss=0.0874, over 7208.00 frames.], tot_loss[loss=0.1979, simple_loss=0.2802, pruned_loss=0.05785, over 325006.14 frames.], batch size: 25, lr: 1.19e-03 +2022-05-27 16:44:20,902 INFO [train.py:823] (1/4) Epoch 14, batch 100, loss[loss=0.22, simple_loss=0.298, pruned_loss=0.07096, over 7208.00 frames.], tot_loss[loss=0.2002, simple_loss=0.2831, pruned_loss=0.0587, over 570824.44 frames.], batch size: 24, lr: 1.19e-03 +2022-05-27 16:44:59,969 INFO [train.py:823] (1/4) Epoch 14, batch 150, loss[loss=0.1992, simple_loss=0.283, pruned_loss=0.05776, over 7283.00 frames.], tot_loss[loss=0.2008, simple_loss=0.2836, pruned_loss=0.05896, over 756840.20 frames.], batch size: 21, lr: 1.18e-03 +2022-05-27 16:45:39,628 INFO [train.py:823] (1/4) Epoch 14, batch 200, loss[loss=0.2068, simple_loss=0.2828, pruned_loss=0.06541, over 7383.00 frames.], tot_loss[loss=0.201, simple_loss=0.2843, pruned_loss=0.05883, over 902175.57 frames.], batch size: 21, lr: 1.18e-03 +2022-05-27 16:46:18,468 INFO [train.py:823] (1/4) Epoch 14, batch 250, loss[loss=0.1628, simple_loss=0.2411, pruned_loss=0.04223, over 7297.00 frames.], tot_loss[loss=0.199, simple_loss=0.2818, pruned_loss=0.05803, over 1020148.03 frames.], batch size: 19, lr: 1.18e-03 +2022-05-27 16:46:57,740 INFO [train.py:823] (1/4) Epoch 14, batch 300, loss[loss=0.2103, simple_loss=0.2954, pruned_loss=0.06259, over 6461.00 frames.], tot_loss[loss=0.1999, simple_loss=0.2829, pruned_loss=0.05846, over 1100235.52 frames.], batch size: 34, lr: 1.18e-03 +2022-05-27 16:47:36,970 INFO [train.py:823] (1/4) Epoch 14, batch 350, loss[loss=0.1781, simple_loss=0.2668, pruned_loss=0.04464, over 7293.00 frames.], tot_loss[loss=0.2013, simple_loss=0.2845, pruned_loss=0.05908, over 1176304.91 frames.], batch size: 19, lr: 1.18e-03 +2022-05-27 16:48:16,013 INFO [train.py:823] (1/4) Epoch 14, batch 400, loss[loss=0.2108, simple_loss=0.2856, pruned_loss=0.06799, over 7290.00 frames.], tot_loss[loss=0.2015, simple_loss=0.2843, pruned_loss=0.05932, over 1230654.99 frames.], batch size: 19, lr: 1.17e-03 +2022-05-27 16:48:54,591 INFO [train.py:823] (1/4) Epoch 14, batch 450, loss[loss=0.1972, simple_loss=0.2719, pruned_loss=0.06126, over 7082.00 frames.], tot_loss[loss=0.2018, simple_loss=0.2849, pruned_loss=0.0593, over 1268695.31 frames.], batch size: 18, lr: 1.17e-03 +2022-05-27 16:49:33,653 INFO [train.py:823] (1/4) Epoch 14, batch 500, loss[loss=0.2154, simple_loss=0.3037, pruned_loss=0.06358, over 7189.00 frames.], tot_loss[loss=0.2017, simple_loss=0.2846, pruned_loss=0.05941, over 1302907.22 frames.], batch size: 21, lr: 1.17e-03 +2022-05-27 16:50:12,833 INFO [train.py:823] (1/4) Epoch 14, batch 550, loss[loss=0.2177, simple_loss=0.2908, pruned_loss=0.07236, over 7237.00 frames.], tot_loss[loss=0.2007, simple_loss=0.2836, pruned_loss=0.05894, over 1333123.45 frames.], batch size: 25, lr: 1.17e-03 +2022-05-27 16:50:52,405 INFO [train.py:823] (1/4) Epoch 14, batch 600, loss[loss=0.2071, simple_loss=0.2753, pruned_loss=0.06947, over 7382.00 frames.], tot_loss[loss=0.1998, simple_loss=0.2819, pruned_loss=0.05883, over 1353975.76 frames.], batch size: 19, lr: 1.17e-03 +2022-05-27 16:51:31,479 INFO [train.py:823] (1/4) Epoch 14, batch 650, loss[loss=0.1558, simple_loss=0.2399, pruned_loss=0.03582, over 7296.00 frames.], tot_loss[loss=0.1993, 
simple_loss=0.2819, pruned_loss=0.0583, over 1367459.22 frames.], batch size: 17, lr: 1.16e-03 +2022-05-27 16:52:10,490 INFO [train.py:823] (1/4) Epoch 14, batch 700, loss[loss=0.2108, simple_loss=0.3017, pruned_loss=0.05993, over 7281.00 frames.], tot_loss[loss=0.1994, simple_loss=0.2819, pruned_loss=0.05847, over 1375870.03 frames.], batch size: 21, lr: 1.16e-03 +2022-05-27 16:52:49,106 INFO [train.py:823] (1/4) Epoch 14, batch 750, loss[loss=0.2069, simple_loss=0.2966, pruned_loss=0.05859, over 7114.00 frames.], tot_loss[loss=0.1994, simple_loss=0.2825, pruned_loss=0.05817, over 1386581.89 frames.], batch size: 20, lr: 1.16e-03 +2022-05-27 16:53:28,611 INFO [train.py:823] (1/4) Epoch 14, batch 800, loss[loss=0.183, simple_loss=0.2678, pruned_loss=0.04913, over 7193.00 frames.], tot_loss[loss=0.1997, simple_loss=0.2826, pruned_loss=0.05842, over 1393048.44 frames.], batch size: 19, lr: 1.16e-03 +2022-05-27 16:54:09,127 INFO [train.py:823] (1/4) Epoch 14, batch 850, loss[loss=0.2111, simple_loss=0.2967, pruned_loss=0.06268, over 7273.00 frames.], tot_loss[loss=0.2, simple_loss=0.2824, pruned_loss=0.05882, over 1396555.31 frames.], batch size: 20, lr: 1.16e-03 +2022-05-27 16:54:48,399 INFO [train.py:823] (1/4) Epoch 14, batch 900, loss[loss=0.2237, simple_loss=0.3009, pruned_loss=0.07328, over 7018.00 frames.], tot_loss[loss=0.1997, simple_loss=0.2824, pruned_loss=0.05857, over 1400832.79 frames.], batch size: 17, lr: 1.15e-03 +2022-05-27 16:55:39,858 INFO [train.py:823] (1/4) Epoch 15, batch 0, loss[loss=0.1682, simple_loss=0.2571, pruned_loss=0.03965, over 7201.00 frames.], tot_loss[loss=0.1682, simple_loss=0.2571, pruned_loss=0.03965, over 7201.00 frames.], batch size: 19, lr: 1.12e-03 +2022-05-27 16:56:18,948 INFO [train.py:823] (1/4) Epoch 15, batch 50, loss[loss=0.1949, simple_loss=0.264, pruned_loss=0.06293, over 7189.00 frames.], tot_loss[loss=0.2056, simple_loss=0.2873, pruned_loss=0.06199, over 319378.95 frames.], batch size: 18, lr: 1.12e-03 +2022-05-27 16:56:57,708 INFO [train.py:823] (1/4) Epoch 15, batch 100, loss[loss=0.2129, simple_loss=0.2987, pruned_loss=0.06351, over 7407.00 frames.], tot_loss[loss=0.1975, simple_loss=0.2806, pruned_loss=0.05719, over 558828.35 frames.], batch size: 22, lr: 1.11e-03 +2022-05-27 16:57:38,769 INFO [train.py:823] (1/4) Epoch 15, batch 150, loss[loss=0.171, simple_loss=0.2429, pruned_loss=0.04955, over 7295.00 frames.], tot_loss[loss=0.1969, simple_loss=0.2794, pruned_loss=0.05719, over 752263.05 frames.], batch size: 17, lr: 1.11e-03 +2022-05-27 16:58:17,745 INFO [train.py:823] (1/4) Epoch 15, batch 200, loss[loss=0.2, simple_loss=0.2854, pruned_loss=0.05726, over 7161.00 frames.], tot_loss[loss=0.1974, simple_loss=0.2802, pruned_loss=0.05728, over 898715.05 frames.], batch size: 23, lr: 1.11e-03 +2022-05-27 16:58:57,128 INFO [train.py:823] (1/4) Epoch 15, batch 250, loss[loss=0.1962, simple_loss=0.2732, pruned_loss=0.05962, over 6514.00 frames.], tot_loss[loss=0.1968, simple_loss=0.2803, pruned_loss=0.05661, over 1015364.96 frames.], batch size: 34, lr: 1.11e-03 +2022-05-27 16:59:36,260 INFO [train.py:823] (1/4) Epoch 15, batch 300, loss[loss=0.2216, simple_loss=0.2956, pruned_loss=0.0738, over 7193.00 frames.], tot_loss[loss=0.196, simple_loss=0.2792, pruned_loss=0.05637, over 1105028.20 frames.], batch size: 18, lr: 1.11e-03 +2022-05-27 17:00:15,461 INFO [train.py:823] (1/4) Epoch 15, batch 350, loss[loss=0.1924, simple_loss=0.2794, pruned_loss=0.0527, over 7377.00 frames.], tot_loss[loss=0.1966, simple_loss=0.2805, pruned_loss=0.05636, 
over 1177331.64 frames.], batch size: 20, lr: 1.10e-03 +2022-05-27 17:00:54,481 INFO [train.py:823] (1/4) Epoch 15, batch 400, loss[loss=0.2137, simple_loss=0.2941, pruned_loss=0.06667, over 7098.00 frames.], tot_loss[loss=0.1965, simple_loss=0.2803, pruned_loss=0.05635, over 1228584.89 frames.], batch size: 19, lr: 1.10e-03 +2022-05-27 17:01:34,114 INFO [train.py:823] (1/4) Epoch 15, batch 450, loss[loss=0.2083, simple_loss=0.2955, pruned_loss=0.06053, over 7236.00 frames.], tot_loss[loss=0.1967, simple_loss=0.2802, pruned_loss=0.05664, over 1276489.29 frames.], batch size: 24, lr: 1.10e-03 +2022-05-27 17:02:13,039 INFO [train.py:823] (1/4) Epoch 15, batch 500, loss[loss=0.1847, simple_loss=0.2921, pruned_loss=0.03866, over 7125.00 frames.], tot_loss[loss=0.1972, simple_loss=0.2809, pruned_loss=0.05677, over 1311811.94 frames.], batch size: 20, lr: 1.10e-03 +2022-05-27 17:02:53,416 INFO [train.py:823] (1/4) Epoch 15, batch 550, loss[loss=0.1823, simple_loss=0.2565, pruned_loss=0.05404, over 7028.00 frames.], tot_loss[loss=0.1973, simple_loss=0.2808, pruned_loss=0.05689, over 1331534.18 frames.], batch size: 17, lr: 1.10e-03 +2022-05-27 17:03:32,322 INFO [train.py:823] (1/4) Epoch 15, batch 600, loss[loss=0.169, simple_loss=0.2642, pruned_loss=0.03685, over 7290.00 frames.], tot_loss[loss=0.1979, simple_loss=0.2812, pruned_loss=0.05727, over 1355716.88 frames.], batch size: 19, lr: 1.10e-03 +2022-05-27 17:04:11,955 INFO [train.py:823] (1/4) Epoch 15, batch 650, loss[loss=0.2162, simple_loss=0.3007, pruned_loss=0.06584, over 7171.00 frames.], tot_loss[loss=0.1981, simple_loss=0.2814, pruned_loss=0.05736, over 1367640.01 frames.], batch size: 22, lr: 1.09e-03 +2022-05-27 17:04:51,130 INFO [train.py:823] (1/4) Epoch 15, batch 700, loss[loss=0.2062, simple_loss=0.2874, pruned_loss=0.06252, over 6903.00 frames.], tot_loss[loss=0.1981, simple_loss=0.2813, pruned_loss=0.05747, over 1382522.50 frames.], batch size: 29, lr: 1.09e-03 +2022-05-27 17:05:30,525 INFO [train.py:823] (1/4) Epoch 15, batch 750, loss[loss=0.1885, simple_loss=0.2785, pruned_loss=0.04925, over 4775.00 frames.], tot_loss[loss=0.1978, simple_loss=0.281, pruned_loss=0.05729, over 1385617.91 frames.], batch size: 47, lr: 1.09e-03 +2022-05-27 17:06:09,170 INFO [train.py:823] (1/4) Epoch 15, batch 800, loss[loss=0.1559, simple_loss=0.2416, pruned_loss=0.03515, over 7197.00 frames.], tot_loss[loss=0.1978, simple_loss=0.2813, pruned_loss=0.0571, over 1390074.48 frames.], batch size: 19, lr: 1.09e-03 +2022-05-27 17:06:48,511 INFO [train.py:823] (1/4) Epoch 15, batch 850, loss[loss=0.2202, simple_loss=0.2991, pruned_loss=0.0707, over 7208.00 frames.], tot_loss[loss=0.1969, simple_loss=0.2803, pruned_loss=0.05675, over 1394921.89 frames.], batch size: 25, lr: 1.09e-03 +2022-05-27 17:07:27,481 INFO [train.py:823] (1/4) Epoch 15, batch 900, loss[loss=0.1795, simple_loss=0.2711, pruned_loss=0.0439, over 7090.00 frames.], tot_loss[loss=0.1969, simple_loss=0.2807, pruned_loss=0.05657, over 1399779.89 frames.], batch size: 19, lr: 1.09e-03 +2022-05-27 17:08:06,668 INFO [train.py:823] (1/4) Epoch 15, batch 950, loss[loss=0.1943, simple_loss=0.272, pruned_loss=0.05833, over 4746.00 frames.], tot_loss[loss=0.197, simple_loss=0.2808, pruned_loss=0.05665, over 1381080.09 frames.], batch size: 46, lr: 1.08e-03 +2022-05-27 17:08:19,719 INFO [train.py:823] (1/4) Epoch 16, batch 0, loss[loss=0.1985, simple_loss=0.28, pruned_loss=0.05853, over 5155.00 frames.], tot_loss[loss=0.1985, simple_loss=0.28, pruned_loss=0.05853, over 5155.00 frames.], batch 
size: 47, lr: 1.05e-03 +2022-05-27 17:08:58,732 INFO [train.py:823] (1/4) Epoch 16, batch 50, loss[loss=0.1644, simple_loss=0.238, pruned_loss=0.04536, over 7018.00 frames.], tot_loss[loss=0.1895, simple_loss=0.2743, pruned_loss=0.05232, over 318884.96 frames.], batch size: 16, lr: 1.05e-03 +2022-05-27 17:09:38,397 INFO [train.py:823] (1/4) Epoch 16, batch 100, loss[loss=0.185, simple_loss=0.28, pruned_loss=0.04503, over 7203.00 frames.], tot_loss[loss=0.1906, simple_loss=0.2741, pruned_loss=0.05358, over 560756.47 frames.], batch size: 19, lr: 1.05e-03 +2022-05-27 17:10:17,941 INFO [train.py:823] (1/4) Epoch 16, batch 150, loss[loss=0.1734, simple_loss=0.261, pruned_loss=0.0429, over 7402.00 frames.], tot_loss[loss=0.1892, simple_loss=0.2737, pruned_loss=0.05232, over 756175.59 frames.], batch size: 19, lr: 1.05e-03 +2022-05-27 17:10:57,590 INFO [train.py:823] (1/4) Epoch 16, batch 200, loss[loss=0.2222, simple_loss=0.3046, pruned_loss=0.06985, over 7170.00 frames.], tot_loss[loss=0.1913, simple_loss=0.2758, pruned_loss=0.05335, over 904234.88 frames.], batch size: 23, lr: 1.05e-03 +2022-05-27 17:11:36,357 INFO [train.py:823] (1/4) Epoch 16, batch 250, loss[loss=0.1925, simple_loss=0.2858, pruned_loss=0.04956, over 7220.00 frames.], tot_loss[loss=0.1916, simple_loss=0.2764, pruned_loss=0.05343, over 1013068.72 frames.], batch size: 25, lr: 1.04e-03 +2022-05-27 17:12:15,985 INFO [train.py:823] (1/4) Epoch 16, batch 300, loss[loss=0.2245, simple_loss=0.3021, pruned_loss=0.07345, over 7260.00 frames.], tot_loss[loss=0.192, simple_loss=0.2765, pruned_loss=0.05373, over 1106088.19 frames.], batch size: 24, lr: 1.04e-03 +2022-05-27 17:12:55,398 INFO [train.py:823] (1/4) Epoch 16, batch 350, loss[loss=0.2127, simple_loss=0.2888, pruned_loss=0.0683, over 7336.00 frames.], tot_loss[loss=0.1928, simple_loss=0.2772, pruned_loss=0.05421, over 1173328.42 frames.], batch size: 23, lr: 1.04e-03 +2022-05-27 17:13:34,553 INFO [train.py:823] (1/4) Epoch 16, batch 400, loss[loss=0.1569, simple_loss=0.2448, pruned_loss=0.03447, over 7292.00 frames.], tot_loss[loss=0.1934, simple_loss=0.2776, pruned_loss=0.05458, over 1228523.17 frames.], batch size: 19, lr: 1.04e-03 +2022-05-27 17:14:13,504 INFO [train.py:823] (1/4) Epoch 16, batch 450, loss[loss=0.1951, simple_loss=0.295, pruned_loss=0.04762, over 7405.00 frames.], tot_loss[loss=0.1933, simple_loss=0.2779, pruned_loss=0.05435, over 1275783.12 frames.], batch size: 22, lr: 1.04e-03 +2022-05-27 17:14:53,408 INFO [train.py:823] (1/4) Epoch 16, batch 500, loss[loss=0.1975, simple_loss=0.2902, pruned_loss=0.05239, over 6927.00 frames.], tot_loss[loss=0.1929, simple_loss=0.2777, pruned_loss=0.05408, over 1311362.79 frames.], batch size: 29, lr: 1.04e-03 +2022-05-27 17:15:32,516 INFO [train.py:823] (1/4) Epoch 16, batch 550, loss[loss=0.2525, simple_loss=0.3352, pruned_loss=0.08492, over 7380.00 frames.], tot_loss[loss=0.1945, simple_loss=0.2792, pruned_loss=0.05493, over 1329592.72 frames.], batch size: 21, lr: 1.03e-03 +2022-05-27 17:16:11,581 INFO [train.py:823] (1/4) Epoch 16, batch 600, loss[loss=0.188, simple_loss=0.2769, pruned_loss=0.04951, over 7117.00 frames.], tot_loss[loss=0.1942, simple_loss=0.2791, pruned_loss=0.05464, over 1345281.72 frames.], batch size: 19, lr: 1.03e-03 +2022-05-27 17:16:52,279 INFO [train.py:823] (1/4) Epoch 16, batch 650, loss[loss=0.1926, simple_loss=0.2735, pruned_loss=0.05582, over 7203.00 frames.], tot_loss[loss=0.1956, simple_loss=0.2802, pruned_loss=0.05549, over 1361359.17 frames.], batch size: 16, lr: 1.03e-03 
+2022-05-27 17:17:31,503 INFO [train.py:823] (1/4) Epoch 16, batch 700, loss[loss=0.1554, simple_loss=0.2388, pruned_loss=0.03601, over 7299.00 frames.], tot_loss[loss=0.1945, simple_loss=0.2789, pruned_loss=0.055, over 1370199.47 frames.], batch size: 19, lr: 1.03e-03 +2022-05-27 17:18:11,836 INFO [train.py:823] (1/4) Epoch 16, batch 750, loss[loss=0.1747, simple_loss=0.2553, pruned_loss=0.04704, over 7195.00 frames.], tot_loss[loss=0.1951, simple_loss=0.2796, pruned_loss=0.05528, over 1382580.48 frames.], batch size: 18, lr: 1.03e-03 +2022-05-27 17:18:51,381 INFO [train.py:823] (1/4) Epoch 16, batch 800, loss[loss=0.2001, simple_loss=0.2883, pruned_loss=0.05598, over 7384.00 frames.], tot_loss[loss=0.195, simple_loss=0.2798, pruned_loss=0.05506, over 1393197.42 frames.], batch size: 20, lr: 1.03e-03 +2022-05-27 17:19:30,548 INFO [train.py:823] (1/4) Epoch 16, batch 850, loss[loss=0.2212, simple_loss=0.3063, pruned_loss=0.06807, over 7190.00 frames.], tot_loss[loss=0.1949, simple_loss=0.2797, pruned_loss=0.05506, over 1400301.49 frames.], batch size: 21, lr: 1.03e-03 +2022-05-27 17:20:11,122 INFO [train.py:823] (1/4) Epoch 16, batch 900, loss[loss=0.1936, simple_loss=0.2763, pruned_loss=0.05541, over 7026.00 frames.], tot_loss[loss=0.1944, simple_loss=0.2791, pruned_loss=0.05483, over 1400833.37 frames.], batch size: 17, lr: 1.02e-03 +2022-05-27 17:21:02,483 INFO [train.py:823] (1/4) Epoch 17, batch 0, loss[loss=0.1913, simple_loss=0.277, pruned_loss=0.05276, over 7192.00 frames.], tot_loss[loss=0.1913, simple_loss=0.277, pruned_loss=0.05276, over 7192.00 frames.], batch size: 21, lr: 9.94e-04 +2022-05-27 17:21:41,993 INFO [train.py:823] (1/4) Epoch 17, batch 50, loss[loss=0.2118, simple_loss=0.2948, pruned_loss=0.0644, over 6998.00 frames.], tot_loss[loss=0.1998, simple_loss=0.2824, pruned_loss=0.0586, over 315225.62 frames.], batch size: 26, lr: 9.92e-04 +2022-05-27 17:22:21,123 INFO [train.py:823] (1/4) Epoch 17, batch 100, loss[loss=0.2004, simple_loss=0.2881, pruned_loss=0.05638, over 7019.00 frames.], tot_loss[loss=0.1967, simple_loss=0.2812, pruned_loss=0.05612, over 560731.90 frames.], batch size: 26, lr: 9.91e-04 +2022-05-27 17:23:00,054 INFO [train.py:823] (1/4) Epoch 17, batch 150, loss[loss=0.1953, simple_loss=0.2701, pruned_loss=0.0603, over 7195.00 frames.], tot_loss[loss=0.1944, simple_loss=0.279, pruned_loss=0.05493, over 748940.06 frames.], batch size: 18, lr: 9.89e-04 +2022-05-27 17:23:38,681 INFO [train.py:823] (1/4) Epoch 17, batch 200, loss[loss=0.2197, simple_loss=0.3097, pruned_loss=0.06486, over 6951.00 frames.], tot_loss[loss=0.1945, simple_loss=0.2796, pruned_loss=0.05467, over 897614.68 frames.], batch size: 29, lr: 9.88e-04 +2022-05-27 17:24:17,993 INFO [train.py:823] (1/4) Epoch 17, batch 250, loss[loss=0.1789, simple_loss=0.2725, pruned_loss=0.04265, over 7332.00 frames.], tot_loss[loss=0.1925, simple_loss=0.2782, pruned_loss=0.05344, over 1018412.98 frames.], batch size: 23, lr: 9.86e-04 +2022-05-27 17:24:57,477 INFO [train.py:823] (1/4) Epoch 17, batch 300, loss[loss=0.1959, simple_loss=0.2705, pruned_loss=0.06064, over 7298.00 frames.], tot_loss[loss=0.1925, simple_loss=0.2782, pruned_loss=0.05338, over 1104100.57 frames.], batch size: 18, lr: 9.85e-04 +2022-05-27 17:25:36,774 INFO [train.py:823] (1/4) Epoch 17, batch 350, loss[loss=0.1804, simple_loss=0.2688, pruned_loss=0.04601, over 7397.00 frames.], tot_loss[loss=0.1929, simple_loss=0.2779, pruned_loss=0.05397, over 1170736.07 frames.], batch size: 19, lr: 9.84e-04 +2022-05-27 17:26:17,358 INFO 
[train.py:823] (1/4) Epoch 17, batch 400, loss[loss=0.1977, simple_loss=0.2799, pruned_loss=0.05776, over 7097.00 frames.], tot_loss[loss=0.1916, simple_loss=0.2763, pruned_loss=0.05348, over 1226781.03 frames.], batch size: 19, lr: 9.82e-04 +2022-05-27 17:26:56,041 INFO [train.py:823] (1/4) Epoch 17, batch 450, loss[loss=0.2297, simple_loss=0.3037, pruned_loss=0.07779, over 4918.00 frames.], tot_loss[loss=0.19, simple_loss=0.275, pruned_loss=0.05252, over 1261609.97 frames.], batch size: 48, lr: 9.81e-04 +2022-05-27 17:27:35,173 INFO [train.py:823] (1/4) Epoch 17, batch 500, loss[loss=0.1837, simple_loss=0.251, pruned_loss=0.05821, over 7000.00 frames.], tot_loss[loss=0.1898, simple_loss=0.2751, pruned_loss=0.05227, over 1297508.51 frames.], batch size: 16, lr: 9.79e-04 +2022-05-27 17:28:14,712 INFO [train.py:823] (1/4) Epoch 17, batch 550, loss[loss=0.1984, simple_loss=0.2893, pruned_loss=0.05371, over 7120.00 frames.], tot_loss[loss=0.1901, simple_loss=0.2754, pruned_loss=0.05246, over 1327112.02 frames.], batch size: 20, lr: 9.78e-04 +2022-05-27 17:28:53,850 INFO [train.py:823] (1/4) Epoch 17, batch 600, loss[loss=0.1964, simple_loss=0.2788, pruned_loss=0.05696, over 7306.00 frames.], tot_loss[loss=0.1903, simple_loss=0.2754, pruned_loss=0.05262, over 1349623.01 frames.], batch size: 22, lr: 9.76e-04 +2022-05-27 17:29:33,208 INFO [train.py:823] (1/4) Epoch 17, batch 650, loss[loss=0.1683, simple_loss=0.2412, pruned_loss=0.04768, over 7022.00 frames.], tot_loss[loss=0.1892, simple_loss=0.2745, pruned_loss=0.05193, over 1363176.76 frames.], batch size: 16, lr: 9.75e-04 +2022-05-27 17:30:12,325 INFO [train.py:823] (1/4) Epoch 17, batch 700, loss[loss=0.1752, simple_loss=0.2492, pruned_loss=0.05061, over 6806.00 frames.], tot_loss[loss=0.1892, simple_loss=0.2744, pruned_loss=0.05197, over 1374754.66 frames.], batch size: 15, lr: 9.74e-04 +2022-05-27 17:30:51,264 INFO [train.py:823] (1/4) Epoch 17, batch 750, loss[loss=0.1682, simple_loss=0.2481, pruned_loss=0.04417, over 7160.00 frames.], tot_loss[loss=0.19, simple_loss=0.2752, pruned_loss=0.05237, over 1386370.70 frames.], batch size: 17, lr: 9.72e-04 +2022-05-27 17:31:30,629 INFO [train.py:823] (1/4) Epoch 17, batch 800, loss[loss=0.1819, simple_loss=0.2546, pruned_loss=0.05461, over 6997.00 frames.], tot_loss[loss=0.1902, simple_loss=0.2755, pruned_loss=0.05248, over 1389098.65 frames.], batch size: 16, lr: 9.71e-04 +2022-05-27 17:32:13,776 INFO [train.py:823] (1/4) Epoch 17, batch 850, loss[loss=0.2108, simple_loss=0.3025, pruned_loss=0.05958, over 7409.00 frames.], tot_loss[loss=0.1901, simple_loss=0.2752, pruned_loss=0.05252, over 1395576.86 frames.], batch size: 22, lr: 9.69e-04 +2022-05-27 17:32:52,877 INFO [train.py:823] (1/4) Epoch 17, batch 900, loss[loss=0.1714, simple_loss=0.2487, pruned_loss=0.04704, over 7299.00 frames.], tot_loss[loss=0.1895, simple_loss=0.2744, pruned_loss=0.05228, over 1402048.40 frames.], batch size: 17, lr: 9.68e-04 +2022-05-27 17:33:32,090 INFO [train.py:823] (1/4) Epoch 17, batch 950, loss[loss=0.1906, simple_loss=0.2812, pruned_loss=0.05001, over 4799.00 frames.], tot_loss[loss=0.1901, simple_loss=0.2749, pruned_loss=0.05263, over 1397463.20 frames.], batch size: 47, lr: 9.67e-04 +2022-05-27 17:33:44,905 INFO [train.py:823] (1/4) Epoch 18, batch 0, loss[loss=0.2005, simple_loss=0.2899, pruned_loss=0.05551, over 7376.00 frames.], tot_loss[loss=0.2005, simple_loss=0.2899, pruned_loss=0.05551, over 7376.00 frames.], batch size: 21, lr: 9.41e-04 +2022-05-27 17:34:24,223 INFO [train.py:823] (1/4) Epoch 
18, batch 50, loss[loss=0.173, simple_loss=0.2741, pruned_loss=0.03594, over 7348.00 frames.], tot_loss[loss=0.1877, simple_loss=0.2727, pruned_loss=0.05134, over 321769.34 frames.], batch size: 23, lr: 9.40e-04 +2022-05-27 17:35:03,317 INFO [train.py:823] (1/4) Epoch 18, batch 100, loss[loss=0.183, simple_loss=0.2732, pruned_loss=0.04647, over 7279.00 frames.], tot_loss[loss=0.1899, simple_loss=0.2753, pruned_loss=0.0522, over 563165.73 frames.], batch size: 20, lr: 9.39e-04 +2022-05-27 17:35:42,638 INFO [train.py:823] (1/4) Epoch 18, batch 150, loss[loss=0.1682, simple_loss=0.2579, pruned_loss=0.03928, over 7196.00 frames.], tot_loss[loss=0.1876, simple_loss=0.2735, pruned_loss=0.0509, over 756738.39 frames.], batch size: 20, lr: 9.37e-04 +2022-05-27 17:36:21,772 INFO [train.py:823] (1/4) Epoch 18, batch 200, loss[loss=0.2088, simple_loss=0.2996, pruned_loss=0.059, over 7279.00 frames.], tot_loss[loss=0.1877, simple_loss=0.2737, pruned_loss=0.05084, over 906928.02 frames.], batch size: 21, lr: 9.36e-04 +2022-05-27 17:37:01,143 INFO [train.py:823] (1/4) Epoch 18, batch 250, loss[loss=0.1813, simple_loss=0.2746, pruned_loss=0.04404, over 7301.00 frames.], tot_loss[loss=0.1887, simple_loss=0.2745, pruned_loss=0.05146, over 1016110.06 frames.], batch size: 22, lr: 9.35e-04 +2022-05-27 17:37:40,123 INFO [train.py:823] (1/4) Epoch 18, batch 300, loss[loss=0.1588, simple_loss=0.2439, pruned_loss=0.03682, over 7433.00 frames.], tot_loss[loss=0.1884, simple_loss=0.2742, pruned_loss=0.0513, over 1106428.59 frames.], batch size: 18, lr: 9.33e-04 +2022-05-27 17:38:19,152 INFO [train.py:823] (1/4) Epoch 18, batch 350, loss[loss=0.1904, simple_loss=0.2809, pruned_loss=0.04995, over 7282.00 frames.], tot_loss[loss=0.1887, simple_loss=0.2741, pruned_loss=0.05164, over 1175733.34 frames.], batch size: 20, lr: 9.32e-04 +2022-05-27 17:38:58,304 INFO [train.py:823] (1/4) Epoch 18, batch 400, loss[loss=0.1926, simple_loss=0.2633, pruned_loss=0.06098, over 7381.00 frames.], tot_loss[loss=0.1895, simple_loss=0.2751, pruned_loss=0.05197, over 1227244.17 frames.], batch size: 19, lr: 9.31e-04 +2022-05-27 17:39:39,182 INFO [train.py:823] (1/4) Epoch 18, batch 450, loss[loss=0.2186, simple_loss=0.2971, pruned_loss=0.07001, over 7163.00 frames.], tot_loss[loss=0.1892, simple_loss=0.2754, pruned_loss=0.05152, over 1270391.35 frames.], batch size: 23, lr: 9.29e-04 +2022-05-27 17:40:18,334 INFO [train.py:823] (1/4) Epoch 18, batch 500, loss[loss=0.2031, simple_loss=0.3041, pruned_loss=0.05101, over 7413.00 frames.], tot_loss[loss=0.1884, simple_loss=0.2748, pruned_loss=0.05102, over 1308144.12 frames.], batch size: 22, lr: 9.28e-04 +2022-05-27 17:40:58,988 INFO [train.py:823] (1/4) Epoch 18, batch 550, loss[loss=0.1809, simple_loss=0.2747, pruned_loss=0.04361, over 7329.00 frames.], tot_loss[loss=0.1875, simple_loss=0.2739, pruned_loss=0.05061, over 1335900.70 frames.], batch size: 23, lr: 9.27e-04 +2022-05-27 17:41:37,889 INFO [train.py:823] (1/4) Epoch 18, batch 600, loss[loss=0.1659, simple_loss=0.2539, pruned_loss=0.03897, over 7282.00 frames.], tot_loss[loss=0.1863, simple_loss=0.2728, pruned_loss=0.04988, over 1357721.73 frames.], batch size: 19, lr: 9.26e-04 +2022-05-27 17:42:17,207 INFO [train.py:823] (1/4) Epoch 18, batch 650, loss[loss=0.144, simple_loss=0.243, pruned_loss=0.02256, over 7101.00 frames.], tot_loss[loss=0.1863, simple_loss=0.273, pruned_loss=0.04987, over 1371954.76 frames.], batch size: 19, lr: 9.24e-04 +2022-05-27 17:42:57,778 INFO [train.py:823] (1/4) Epoch 18, batch 700, loss[loss=0.162, 
simple_loss=0.2536, pruned_loss=0.03522, over 7197.00 frames.], tot_loss[loss=0.1872, simple_loss=0.2733, pruned_loss=0.05053, over 1376247.89 frames.], batch size: 19, lr: 9.23e-04 +2022-05-27 17:43:37,102 INFO [train.py:823] (1/4) Epoch 18, batch 750, loss[loss=0.1558, simple_loss=0.242, pruned_loss=0.0348, over 7098.00 frames.], tot_loss[loss=0.1874, simple_loss=0.2735, pruned_loss=0.05061, over 1387924.43 frames.], batch size: 18, lr: 9.22e-04 +2022-05-27 17:44:16,125 INFO [train.py:823] (1/4) Epoch 18, batch 800, loss[loss=0.1969, simple_loss=0.2766, pruned_loss=0.05863, over 7194.00 frames.], tot_loss[loss=0.1875, simple_loss=0.2735, pruned_loss=0.05072, over 1392026.48 frames.], batch size: 20, lr: 9.21e-04 +2022-05-27 17:44:55,762 INFO [train.py:823] (1/4) Epoch 18, batch 850, loss[loss=0.1741, simple_loss=0.2762, pruned_loss=0.03605, over 7180.00 frames.], tot_loss[loss=0.1861, simple_loss=0.2723, pruned_loss=0.0499, over 1395095.75 frames.], batch size: 21, lr: 9.19e-04 +2022-05-27 17:45:34,711 INFO [train.py:823] (1/4) Epoch 18, batch 900, loss[loss=0.1489, simple_loss=0.2303, pruned_loss=0.03379, over 7159.00 frames.], tot_loss[loss=0.1863, simple_loss=0.2724, pruned_loss=0.05012, over 1401934.25 frames.], batch size: 17, lr: 9.18e-04 +2022-05-27 17:46:13,681 INFO [train.py:823] (1/4) Epoch 18, batch 950, loss[loss=0.2119, simple_loss=0.2895, pruned_loss=0.06714, over 5164.00 frames.], tot_loss[loss=0.187, simple_loss=0.2729, pruned_loss=0.05059, over 1375890.26 frames.], batch size: 46, lr: 9.17e-04 +2022-05-27 17:46:27,011 INFO [train.py:823] (1/4) Epoch 19, batch 0, loss[loss=0.1946, simple_loss=0.2852, pruned_loss=0.05195, over 7012.00 frames.], tot_loss[loss=0.1946, simple_loss=0.2852, pruned_loss=0.05195, over 7012.00 frames.], batch size: 26, lr: 8.94e-04 +2022-05-27 17:47:05,746 INFO [train.py:823] (1/4) Epoch 19, batch 50, loss[loss=0.1875, simple_loss=0.2687, pruned_loss=0.05314, over 7187.00 frames.], tot_loss[loss=0.18, simple_loss=0.2656, pruned_loss=0.04718, over 325638.68 frames.], batch size: 19, lr: 8.92e-04 +2022-05-27 17:47:44,973 INFO [train.py:823] (1/4) Epoch 19, batch 100, loss[loss=0.2013, simple_loss=0.2888, pruned_loss=0.05687, over 6554.00 frames.], tot_loss[loss=0.1827, simple_loss=0.2679, pruned_loss=0.04874, over 566245.83 frames.], batch size: 34, lr: 8.91e-04 +2022-05-27 17:48:24,268 INFO [train.py:823] (1/4) Epoch 19, batch 150, loss[loss=0.1696, simple_loss=0.2483, pruned_loss=0.04545, over 7094.00 frames.], tot_loss[loss=0.1823, simple_loss=0.2676, pruned_loss=0.04848, over 758714.30 frames.], batch size: 18, lr: 8.90e-04 +2022-05-27 17:49:03,223 INFO [train.py:823] (1/4) Epoch 19, batch 200, loss[loss=0.1803, simple_loss=0.2688, pruned_loss=0.04588, over 7175.00 frames.], tot_loss[loss=0.1837, simple_loss=0.2691, pruned_loss=0.04912, over 901566.92 frames.], batch size: 22, lr: 8.89e-04 +2022-05-27 17:49:41,874 INFO [train.py:823] (1/4) Epoch 19, batch 250, loss[loss=0.2193, simple_loss=0.2913, pruned_loss=0.07363, over 7118.00 frames.], tot_loss[loss=0.1841, simple_loss=0.27, pruned_loss=0.04906, over 1017976.77 frames.], batch size: 19, lr: 8.88e-04 +2022-05-27 17:50:22,325 INFO [train.py:823] (1/4) Epoch 19, batch 300, loss[loss=0.1773, simple_loss=0.2551, pruned_loss=0.04973, over 7009.00 frames.], tot_loss[loss=0.1861, simple_loss=0.2714, pruned_loss=0.05041, over 1110083.84 frames.], batch size: 16, lr: 8.87e-04 +2022-05-27 17:51:01,421 INFO [train.py:823] (1/4) Epoch 19, batch 350, loss[loss=0.2513, simple_loss=0.3228, 
pruned_loss=0.08995, over 7314.00 frames.], tot_loss[loss=0.1876, simple_loss=0.273, pruned_loss=0.05106, over 1177471.48 frames.], batch size: 18, lr: 8.85e-04 +2022-05-27 17:51:40,817 INFO [train.py:823] (1/4) Epoch 19, batch 400, loss[loss=0.1781, simple_loss=0.2549, pruned_loss=0.05066, over 7017.00 frames.], tot_loss[loss=0.1872, simple_loss=0.273, pruned_loss=0.0507, over 1235055.68 frames.], batch size: 16, lr: 8.84e-04 +2022-05-27 17:52:20,010 INFO [train.py:823] (1/4) Epoch 19, batch 450, loss[loss=0.2395, simple_loss=0.3132, pruned_loss=0.08286, over 7141.00 frames.], tot_loss[loss=0.1876, simple_loss=0.2735, pruned_loss=0.05087, over 1277048.05 frames.], batch size: 23, lr: 8.83e-04 +2022-05-27 17:52:59,892 INFO [train.py:823] (1/4) Epoch 19, batch 500, loss[loss=0.1958, simple_loss=0.2844, pruned_loss=0.0536, over 6519.00 frames.], tot_loss[loss=0.1865, simple_loss=0.2725, pruned_loss=0.05027, over 1310057.46 frames.], batch size: 34, lr: 8.82e-04 +2022-05-27 17:53:39,209 INFO [train.py:823] (1/4) Epoch 19, batch 550, loss[loss=0.1613, simple_loss=0.2406, pruned_loss=0.04105, over 7022.00 frames.], tot_loss[loss=0.1857, simple_loss=0.2719, pruned_loss=0.04977, over 1332813.60 frames.], batch size: 17, lr: 8.81e-04 +2022-05-27 17:54:18,348 INFO [train.py:823] (1/4) Epoch 19, batch 600, loss[loss=0.1731, simple_loss=0.2632, pruned_loss=0.04148, over 7101.00 frames.], tot_loss[loss=0.1859, simple_loss=0.2721, pruned_loss=0.04983, over 1352661.02 frames.], batch size: 19, lr: 8.80e-04 +2022-05-27 17:54:57,232 INFO [train.py:823] (1/4) Epoch 19, batch 650, loss[loss=0.1498, simple_loss=0.2371, pruned_loss=0.03122, over 7431.00 frames.], tot_loss[loss=0.1848, simple_loss=0.2713, pruned_loss=0.04922, over 1367283.40 frames.], batch size: 18, lr: 8.78e-04 +2022-05-27 17:55:36,371 INFO [train.py:823] (1/4) Epoch 19, batch 700, loss[loss=0.193, simple_loss=0.2798, pruned_loss=0.05308, over 6977.00 frames.], tot_loss[loss=0.1836, simple_loss=0.2703, pruned_loss=0.04849, over 1378039.12 frames.], batch size: 26, lr: 8.77e-04 +2022-05-27 17:56:14,894 INFO [train.py:823] (1/4) Epoch 19, batch 750, loss[loss=0.1783, simple_loss=0.2686, pruned_loss=0.04406, over 7375.00 frames.], tot_loss[loss=0.1838, simple_loss=0.2702, pruned_loss=0.04876, over 1389729.38 frames.], batch size: 21, lr: 8.76e-04 +2022-05-27 17:56:54,496 INFO [train.py:823] (1/4) Epoch 19, batch 800, loss[loss=0.1874, simple_loss=0.2775, pruned_loss=0.04865, over 7312.00 frames.], tot_loss[loss=0.1842, simple_loss=0.2705, pruned_loss=0.04895, over 1398716.66 frames.], batch size: 22, lr: 8.75e-04 +2022-05-27 17:57:33,461 INFO [train.py:823] (1/4) Epoch 19, batch 850, loss[loss=0.1757, simple_loss=0.2701, pruned_loss=0.04068, over 7386.00 frames.], tot_loss[loss=0.1846, simple_loss=0.2709, pruned_loss=0.04911, over 1403499.84 frames.], batch size: 21, lr: 8.74e-04 +2022-05-27 17:58:12,601 INFO [train.py:823] (1/4) Epoch 19, batch 900, loss[loss=0.1801, simple_loss=0.2688, pruned_loss=0.04572, over 7029.00 frames.], tot_loss[loss=0.1857, simple_loss=0.2717, pruned_loss=0.04985, over 1393411.70 frames.], batch size: 26, lr: 8.73e-04 +2022-05-27 17:59:02,771 INFO [train.py:823] (1/4) Epoch 20, batch 0, loss[loss=0.1723, simple_loss=0.2631, pruned_loss=0.04077, over 6502.00 frames.], tot_loss[loss=0.1723, simple_loss=0.2631, pruned_loss=0.04077, over 6502.00 frames.], batch size: 34, lr: 8.51e-04 +2022-05-27 17:59:42,293 INFO [train.py:823] (1/4) Epoch 20, batch 50, loss[loss=0.184, simple_loss=0.266, pruned_loss=0.05096, over 
7296.00 frames.], tot_loss[loss=0.1842, simple_loss=0.2733, pruned_loss=0.04755, over 322458.19 frames.], batch size: 18, lr: 8.49e-04 +2022-05-27 18:00:21,135 INFO [train.py:823] (1/4) Epoch 20, batch 100, loss[loss=0.1782, simple_loss=0.2691, pruned_loss=0.04362, over 4719.00 frames.], tot_loss[loss=0.184, simple_loss=0.272, pruned_loss=0.04799, over 561857.89 frames.], batch size: 46, lr: 8.48e-04 +2022-05-27 18:01:00,376 INFO [train.py:823] (1/4) Epoch 20, batch 150, loss[loss=0.1513, simple_loss=0.2365, pruned_loss=0.03307, over 7293.00 frames.], tot_loss[loss=0.1833, simple_loss=0.2702, pruned_loss=0.04818, over 751450.44 frames.], batch size: 17, lr: 8.47e-04 +2022-05-27 18:01:41,346 INFO [train.py:823] (1/4) Epoch 20, batch 200, loss[loss=0.1548, simple_loss=0.2311, pruned_loss=0.03931, over 7004.00 frames.], tot_loss[loss=0.1825, simple_loss=0.269, pruned_loss=0.04797, over 902554.66 frames.], batch size: 16, lr: 8.46e-04 +2022-05-27 18:02:20,203 INFO [train.py:823] (1/4) Epoch 20, batch 250, loss[loss=0.1724, simple_loss=0.2539, pruned_loss=0.04546, over 7305.00 frames.], tot_loss[loss=0.1831, simple_loss=0.2693, pruned_loss=0.04847, over 1017441.11 frames.], batch size: 18, lr: 8.45e-04 +2022-05-27 18:02:59,634 INFO [train.py:823] (1/4) Epoch 20, batch 300, loss[loss=0.1981, simple_loss=0.2947, pruned_loss=0.05078, over 7292.00 frames.], tot_loss[loss=0.1834, simple_loss=0.2699, pruned_loss=0.04846, over 1107388.31 frames.], batch size: 22, lr: 8.44e-04 +2022-05-27 18:03:38,766 INFO [train.py:823] (1/4) Epoch 20, batch 350, loss[loss=0.1741, simple_loss=0.2687, pruned_loss=0.03978, over 7207.00 frames.], tot_loss[loss=0.1839, simple_loss=0.2705, pruned_loss=0.04868, over 1176473.67 frames.], batch size: 20, lr: 8.43e-04 +2022-05-27 18:04:18,918 INFO [train.py:823] (1/4) Epoch 20, batch 400, loss[loss=0.1772, simple_loss=0.2719, pruned_loss=0.04125, over 7152.00 frames.], tot_loss[loss=0.1829, simple_loss=0.2698, pruned_loss=0.04805, over 1231891.63 frames.], batch size: 23, lr: 8.42e-04 +2022-05-27 18:04:57,908 INFO [train.py:823] (1/4) Epoch 20, batch 450, loss[loss=0.1464, simple_loss=0.2256, pruned_loss=0.03359, over 7153.00 frames.], tot_loss[loss=0.1841, simple_loss=0.2705, pruned_loss=0.0488, over 1270286.53 frames.], batch size: 17, lr: 8.41e-04 +2022-05-27 18:05:38,480 INFO [train.py:823] (1/4) Epoch 20, batch 500, loss[loss=0.1832, simple_loss=0.2657, pruned_loss=0.05034, over 7016.00 frames.], tot_loss[loss=0.1842, simple_loss=0.2705, pruned_loss=0.04891, over 1306230.86 frames.], batch size: 17, lr: 8.40e-04 +2022-05-27 18:06:18,324 INFO [train.py:823] (1/4) Epoch 20, batch 550, loss[loss=0.173, simple_loss=0.2644, pruned_loss=0.04083, over 7156.00 frames.], tot_loss[loss=0.1832, simple_loss=0.2694, pruned_loss=0.04845, over 1334613.22 frames.], batch size: 23, lr: 8.39e-04 +2022-05-27 18:06:57,327 INFO [train.py:823] (1/4) Epoch 20, batch 600, loss[loss=0.2033, simple_loss=0.2875, pruned_loss=0.05955, over 7101.00 frames.], tot_loss[loss=0.1827, simple_loss=0.2694, pruned_loss=0.04802, over 1349823.43 frames.], batch size: 18, lr: 8.38e-04 +2022-05-27 18:07:37,169 INFO [train.py:823] (1/4) Epoch 20, batch 650, loss[loss=0.1807, simple_loss=0.2812, pruned_loss=0.04012, over 7041.00 frames.], tot_loss[loss=0.1824, simple_loss=0.2691, pruned_loss=0.04789, over 1365387.50 frames.], batch size: 29, lr: 8.37e-04 +2022-05-27 18:08:16,117 INFO [train.py:823] (1/4) Epoch 20, batch 700, loss[loss=0.1632, simple_loss=0.2485, pruned_loss=0.03896, over 7096.00 frames.], 
tot_loss[loss=0.1826, simple_loss=0.2688, pruned_loss=0.04817, over 1380646.90 frames.], batch size: 18, lr: 8.36e-04 +2022-05-27 18:08:55,667 INFO [train.py:823] (1/4) Epoch 20, batch 750, loss[loss=0.1812, simple_loss=0.273, pruned_loss=0.04468, over 7279.00 frames.], tot_loss[loss=0.1813, simple_loss=0.2678, pruned_loss=0.04746, over 1391193.79 frames.], batch size: 21, lr: 8.35e-04 +2022-05-27 18:09:34,498 INFO [train.py:823] (1/4) Epoch 20, batch 800, loss[loss=0.1721, simple_loss=0.262, pruned_loss=0.04109, over 7017.00 frames.], tot_loss[loss=0.1816, simple_loss=0.2688, pruned_loss=0.04721, over 1398845.08 frames.], batch size: 17, lr: 8.34e-04 +2022-05-27 18:10:13,636 INFO [train.py:823] (1/4) Epoch 20, batch 850, loss[loss=0.1805, simple_loss=0.2739, pruned_loss=0.04358, over 7040.00 frames.], tot_loss[loss=0.1808, simple_loss=0.2675, pruned_loss=0.04699, over 1402154.41 frames.], batch size: 26, lr: 8.33e-04 +2022-05-27 18:10:52,723 INFO [train.py:823] (1/4) Epoch 20, batch 900, loss[loss=0.1554, simple_loss=0.2435, pruned_loss=0.03366, over 6774.00 frames.], tot_loss[loss=0.1813, simple_loss=0.2683, pruned_loss=0.0471, over 1399367.44 frames.], batch size: 15, lr: 8.31e-04 +2022-05-27 18:11:42,471 INFO [train.py:823] (1/4) Epoch 21, batch 0, loss[loss=0.1743, simple_loss=0.2545, pruned_loss=0.04708, over 7197.00 frames.], tot_loss[loss=0.1743, simple_loss=0.2545, pruned_loss=0.04708, over 7197.00 frames.], batch size: 18, lr: 8.11e-04 +2022-05-27 18:12:21,414 INFO [train.py:823] (1/4) Epoch 21, batch 50, loss[loss=0.166, simple_loss=0.2645, pruned_loss=0.03374, over 7238.00 frames.], tot_loss[loss=0.1792, simple_loss=0.267, pruned_loss=0.04567, over 318338.56 frames.], batch size: 25, lr: 8.10e-04 +2022-05-27 18:13:00,541 INFO [train.py:823] (1/4) Epoch 21, batch 100, loss[loss=0.1615, simple_loss=0.2554, pruned_loss=0.03377, over 6414.00 frames.], tot_loss[loss=0.1783, simple_loss=0.2653, pruned_loss=0.04569, over 561747.28 frames.], batch size: 34, lr: 8.09e-04 +2022-05-27 18:13:40,017 INFO [train.py:823] (1/4) Epoch 21, batch 150, loss[loss=0.1966, simple_loss=0.2915, pruned_loss=0.05084, over 7276.00 frames.], tot_loss[loss=0.1786, simple_loss=0.2656, pruned_loss=0.04585, over 755308.07 frames.], batch size: 20, lr: 8.08e-04 +2022-05-27 18:14:19,924 INFO [train.py:823] (1/4) Epoch 21, batch 200, loss[loss=0.1582, simple_loss=0.2374, pruned_loss=0.03954, over 7306.00 frames.], tot_loss[loss=0.1796, simple_loss=0.2662, pruned_loss=0.04645, over 903517.31 frames.], batch size: 18, lr: 8.07e-04 +2022-05-27 18:14:59,157 INFO [train.py:823] (1/4) Epoch 21, batch 250, loss[loss=0.1578, simple_loss=0.2467, pruned_loss=0.03449, over 7273.00 frames.], tot_loss[loss=0.179, simple_loss=0.2658, pruned_loss=0.04609, over 1012188.89 frames.], batch size: 20, lr: 8.06e-04 +2022-05-27 18:15:37,868 INFO [train.py:823] (1/4) Epoch 21, batch 300, loss[loss=0.1788, simple_loss=0.2729, pruned_loss=0.04233, over 6588.00 frames.], tot_loss[loss=0.1805, simple_loss=0.2669, pruned_loss=0.04701, over 1100400.63 frames.], batch size: 34, lr: 8.05e-04 +2022-05-27 18:16:17,447 INFO [train.py:823] (1/4) Epoch 21, batch 350, loss[loss=0.1979, simple_loss=0.2879, pruned_loss=0.05399, over 7411.00 frames.], tot_loss[loss=0.1807, simple_loss=0.2675, pruned_loss=0.04691, over 1171395.74 frames.], batch size: 22, lr: 8.04e-04 +2022-05-27 18:16:56,505 INFO [train.py:823] (1/4) Epoch 21, batch 400, loss[loss=0.162, simple_loss=0.2439, pruned_loss=0.04009, over 7297.00 frames.], tot_loss[loss=0.181, 
simple_loss=0.2681, pruned_loss=0.04694, over 1226491.01 frames.], batch size: 17, lr: 8.03e-04 +2022-05-27 18:17:36,042 INFO [train.py:823] (1/4) Epoch 21, batch 450, loss[loss=0.1643, simple_loss=0.2612, pruned_loss=0.03372, over 7186.00 frames.], tot_loss[loss=0.1811, simple_loss=0.2683, pruned_loss=0.04692, over 1270883.82 frames.], batch size: 21, lr: 8.02e-04 +2022-05-27 18:18:15,336 INFO [train.py:823] (1/4) Epoch 21, batch 500, loss[loss=0.1735, simple_loss=0.2531, pruned_loss=0.04698, over 7183.00 frames.], tot_loss[loss=0.1812, simple_loss=0.2684, pruned_loss=0.04696, over 1303981.90 frames.], batch size: 18, lr: 8.01e-04 +2022-05-27 18:18:54,646 INFO [train.py:823] (1/4) Epoch 21, batch 550, loss[loss=0.2069, simple_loss=0.2943, pruned_loss=0.05979, over 7377.00 frames.], tot_loss[loss=0.1809, simple_loss=0.268, pruned_loss=0.04695, over 1335405.63 frames.], batch size: 21, lr: 8.00e-04 +2022-05-27 18:19:33,804 INFO [train.py:823] (1/4) Epoch 21, batch 600, loss[loss=0.1974, simple_loss=0.2881, pruned_loss=0.0533, over 6408.00 frames.], tot_loss[loss=0.1809, simple_loss=0.2679, pruned_loss=0.04693, over 1352921.66 frames.], batch size: 34, lr: 8.00e-04 +2022-05-27 18:20:13,154 INFO [train.py:823] (1/4) Epoch 21, batch 650, loss[loss=0.1589, simple_loss=0.2532, pruned_loss=0.03232, over 7309.00 frames.], tot_loss[loss=0.1819, simple_loss=0.2688, pruned_loss=0.04745, over 1368854.36 frames.], batch size: 22, lr: 7.99e-04 +2022-05-27 18:20:52,511 INFO [train.py:823] (1/4) Epoch 21, batch 700, loss[loss=0.1887, simple_loss=0.2726, pruned_loss=0.05239, over 7184.00 frames.], tot_loss[loss=0.1818, simple_loss=0.2692, pruned_loss=0.04721, over 1379970.90 frames.], batch size: 20, lr: 7.98e-04 +2022-05-27 18:21:31,554 INFO [train.py:823] (1/4) Epoch 21, batch 750, loss[loss=0.22, simple_loss=0.3007, pruned_loss=0.06966, over 7207.00 frames.], tot_loss[loss=0.1809, simple_loss=0.2678, pruned_loss=0.04699, over 1378402.86 frames.], batch size: 25, lr: 7.97e-04 +2022-05-27 18:22:10,727 INFO [train.py:823] (1/4) Epoch 21, batch 800, loss[loss=0.1703, simple_loss=0.2596, pruned_loss=0.04049, over 7351.00 frames.], tot_loss[loss=0.1805, simple_loss=0.2677, pruned_loss=0.04664, over 1384002.19 frames.], batch size: 23, lr: 7.96e-04 +2022-05-27 18:22:50,059 INFO [train.py:823] (1/4) Epoch 21, batch 850, loss[loss=0.1597, simple_loss=0.2618, pruned_loss=0.02881, over 7190.00 frames.], tot_loss[loss=0.1799, simple_loss=0.2669, pruned_loss=0.04646, over 1388565.93 frames.], batch size: 20, lr: 7.95e-04 +2022-05-27 18:23:29,126 INFO [train.py:823] (1/4) Epoch 21, batch 900, loss[loss=0.1582, simple_loss=0.2607, pruned_loss=0.0278, over 7361.00 frames.], tot_loss[loss=0.1791, simple_loss=0.2663, pruned_loss=0.04596, over 1386291.04 frames.], batch size: 20, lr: 7.94e-04 +2022-05-27 18:24:19,597 INFO [train.py:823] (1/4) Epoch 22, batch 0, loss[loss=0.1819, simple_loss=0.2638, pruned_loss=0.04997, over 7380.00 frames.], tot_loss[loss=0.1819, simple_loss=0.2638, pruned_loss=0.04997, over 7380.00 frames.], batch size: 21, lr: 7.75e-04 +2022-05-27 18:25:00,021 INFO [train.py:823] (1/4) Epoch 22, batch 50, loss[loss=0.2311, simple_loss=0.3049, pruned_loss=0.07863, over 7175.00 frames.], tot_loss[loss=0.1751, simple_loss=0.2609, pruned_loss=0.04463, over 321284.61 frames.], batch size: 22, lr: 7.74e-04 +2022-05-27 18:25:39,864 INFO [train.py:823] (1/4) Epoch 22, batch 100, loss[loss=0.192, simple_loss=0.2806, pruned_loss=0.05166, over 7110.00 frames.], tot_loss[loss=0.1768, simple_loss=0.2632, 
pruned_loss=0.04518, over 567964.59 frames.], batch size: 20, lr: 7.73e-04 +2022-05-27 18:26:18,846 INFO [train.py:823] (1/4) Epoch 22, batch 150, loss[loss=0.1877, simple_loss=0.2801, pruned_loss=0.04764, over 4889.00 frames.], tot_loss[loss=0.1791, simple_loss=0.2662, pruned_loss=0.04602, over 754699.48 frames.], batch size: 47, lr: 7.73e-04 +2022-05-27 18:26:59,551 INFO [train.py:823] (1/4) Epoch 22, batch 200, loss[loss=0.1885, simple_loss=0.2858, pruned_loss=0.04559, over 7113.00 frames.], tot_loss[loss=0.1799, simple_loss=0.2676, pruned_loss=0.04609, over 899062.12 frames.], batch size: 20, lr: 7.72e-04 +2022-05-27 18:27:38,545 INFO [train.py:823] (1/4) Epoch 22, batch 250, loss[loss=0.1737, simple_loss=0.251, pruned_loss=0.04817, over 7085.00 frames.], tot_loss[loss=0.1785, simple_loss=0.2665, pruned_loss=0.04528, over 1016762.95 frames.], batch size: 18, lr: 7.71e-04 +2022-05-27 18:28:18,091 INFO [train.py:823] (1/4) Epoch 22, batch 300, loss[loss=0.1596, simple_loss=0.2466, pruned_loss=0.03632, over 7193.00 frames.], tot_loss[loss=0.1781, simple_loss=0.2659, pruned_loss=0.04516, over 1104233.73 frames.], batch size: 18, lr: 7.70e-04 +2022-05-27 18:28:58,652 INFO [train.py:823] (1/4) Epoch 22, batch 350, loss[loss=0.1797, simple_loss=0.276, pruned_loss=0.04169, over 7054.00 frames.], tot_loss[loss=0.1797, simple_loss=0.2672, pruned_loss=0.04615, over 1175928.68 frames.], batch size: 29, lr: 7.69e-04 +2022-05-27 18:29:37,847 INFO [train.py:823] (1/4) Epoch 22, batch 400, loss[loss=0.2031, simple_loss=0.2828, pruned_loss=0.06166, over 7177.00 frames.], tot_loss[loss=0.1795, simple_loss=0.2666, pruned_loss=0.04615, over 1231457.93 frames.], batch size: 21, lr: 7.68e-04 +2022-05-27 18:30:17,484 INFO [train.py:823] (1/4) Epoch 22, batch 450, loss[loss=0.1855, simple_loss=0.2462, pruned_loss=0.0624, over 7238.00 frames.], tot_loss[loss=0.179, simple_loss=0.2663, pruned_loss=0.0458, over 1277992.98 frames.], batch size: 16, lr: 7.67e-04 +2022-05-27 18:30:56,595 INFO [train.py:823] (1/4) Epoch 22, batch 500, loss[loss=0.1952, simple_loss=0.2799, pruned_loss=0.05525, over 6447.00 frames.], tot_loss[loss=0.1781, simple_loss=0.2655, pruned_loss=0.04542, over 1304178.88 frames.], batch size: 34, lr: 7.66e-04 +2022-05-27 18:31:36,003 INFO [train.py:823] (1/4) Epoch 22, batch 550, loss[loss=0.1821, simple_loss=0.2751, pruned_loss=0.0445, over 6914.00 frames.], tot_loss[loss=0.1774, simple_loss=0.265, pruned_loss=0.04496, over 1329866.61 frames.], batch size: 29, lr: 7.65e-04 +2022-05-27 18:32:15,238 INFO [train.py:823] (1/4) Epoch 22, batch 600, loss[loss=0.1785, simple_loss=0.2553, pruned_loss=0.05088, over 7013.00 frames.], tot_loss[loss=0.1772, simple_loss=0.2647, pruned_loss=0.04485, over 1348994.39 frames.], batch size: 17, lr: 7.65e-04 +2022-05-27 18:32:54,263 INFO [train.py:823] (1/4) Epoch 22, batch 650, loss[loss=0.2022, simple_loss=0.2969, pruned_loss=0.05376, over 7112.00 frames.], tot_loss[loss=0.1774, simple_loss=0.2649, pruned_loss=0.04496, over 1359212.04 frames.], batch size: 20, lr: 7.64e-04 +2022-05-27 18:33:33,446 INFO [train.py:823] (1/4) Epoch 22, batch 700, loss[loss=0.198, simple_loss=0.2824, pruned_loss=0.05678, over 7103.00 frames.], tot_loss[loss=0.1769, simple_loss=0.2647, pruned_loss=0.04454, over 1371888.00 frames.], batch size: 19, lr: 7.63e-04 +2022-05-27 18:34:12,328 INFO [train.py:823] (1/4) Epoch 22, batch 750, loss[loss=0.1664, simple_loss=0.2477, pruned_loss=0.04249, over 7000.00 frames.], tot_loss[loss=0.1772, simple_loss=0.265, pruned_loss=0.0447, over 
1381705.02 frames.], batch size: 16, lr: 7.62e-04 +2022-05-27 18:34:51,820 INFO [train.py:823] (1/4) Epoch 22, batch 800, loss[loss=0.1495, simple_loss=0.2369, pruned_loss=0.03105, over 7372.00 frames.], tot_loss[loss=0.177, simple_loss=0.2647, pruned_loss=0.04466, over 1391744.44 frames.], batch size: 20, lr: 7.61e-04 +2022-05-27 18:35:30,926 INFO [train.py:823] (1/4) Epoch 22, batch 850, loss[loss=0.1657, simple_loss=0.2709, pruned_loss=0.03023, over 6620.00 frames.], tot_loss[loss=0.1776, simple_loss=0.2654, pruned_loss=0.04484, over 1400003.72 frames.], batch size: 34, lr: 7.60e-04 +2022-05-27 18:36:10,307 INFO [train.py:823] (1/4) Epoch 22, batch 900, loss[loss=0.1924, simple_loss=0.2809, pruned_loss=0.05192, over 7159.00 frames.], tot_loss[loss=0.1783, simple_loss=0.2664, pruned_loss=0.04513, over 1404926.76 frames.], batch size: 23, lr: 7.59e-04 +2022-05-27 18:37:01,199 INFO [train.py:823] (1/4) Epoch 23, batch 0, loss[loss=0.1549, simple_loss=0.2306, pruned_loss=0.03961, over 6820.00 frames.], tot_loss[loss=0.1549, simple_loss=0.2306, pruned_loss=0.03961, over 6820.00 frames.], batch size: 15, lr: 7.42e-04 +2022-05-27 18:37:41,610 INFO [train.py:823] (1/4) Epoch 23, batch 50, loss[loss=0.174, simple_loss=0.2678, pruned_loss=0.04012, over 7367.00 frames.], tot_loss[loss=0.1747, simple_loss=0.2631, pruned_loss=0.04319, over 321251.93 frames.], batch size: 21, lr: 7.41e-04 +2022-05-27 18:38:20,830 INFO [train.py:823] (1/4) Epoch 23, batch 100, loss[loss=0.1819, simple_loss=0.2661, pruned_loss=0.04884, over 7381.00 frames.], tot_loss[loss=0.1751, simple_loss=0.2639, pruned_loss=0.04314, over 563105.37 frames.], batch size: 20, lr: 7.41e-04 +2022-05-27 18:39:00,154 INFO [train.py:823] (1/4) Epoch 23, batch 150, loss[loss=0.1547, simple_loss=0.2346, pruned_loss=0.03744, over 7311.00 frames.], tot_loss[loss=0.1771, simple_loss=0.2655, pruned_loss=0.04439, over 752642.94 frames.], batch size: 18, lr: 7.40e-04 +2022-05-27 18:39:39,814 INFO [train.py:823] (1/4) Epoch 23, batch 200, loss[loss=0.1685, simple_loss=0.2612, pruned_loss=0.03786, over 4856.00 frames.], tot_loss[loss=0.1756, simple_loss=0.264, pruned_loss=0.04361, over 899515.87 frames.], batch size: 48, lr: 7.39e-04 +2022-05-27 18:40:19,170 INFO [train.py:823] (1/4) Epoch 23, batch 250, loss[loss=0.1537, simple_loss=0.2509, pruned_loss=0.02823, over 7108.00 frames.], tot_loss[loss=0.1761, simple_loss=0.2643, pruned_loss=0.04396, over 1018947.25 frames.], batch size: 18, lr: 7.38e-04 +2022-05-27 18:40:58,267 INFO [train.py:823] (1/4) Epoch 23, batch 300, loss[loss=0.1763, simple_loss=0.2715, pruned_loss=0.04053, over 7296.00 frames.], tot_loss[loss=0.1772, simple_loss=0.2655, pruned_loss=0.04442, over 1111510.74 frames.], batch size: 22, lr: 7.37e-04 +2022-05-27 18:41:37,498 INFO [train.py:823] (1/4) Epoch 23, batch 350, loss[loss=0.1913, simple_loss=0.2814, pruned_loss=0.05062, over 7287.00 frames.], tot_loss[loss=0.1771, simple_loss=0.2648, pruned_loss=0.04469, over 1182285.27 frames.], batch size: 20, lr: 7.36e-04 +2022-05-27 18:42:16,429 INFO [train.py:823] (1/4) Epoch 23, batch 400, loss[loss=0.151, simple_loss=0.2251, pruned_loss=0.0384, over 7302.00 frames.], tot_loss[loss=0.176, simple_loss=0.263, pruned_loss=0.04451, over 1234437.81 frames.], batch size: 17, lr: 7.36e-04 +2022-05-27 18:42:55,384 INFO [train.py:823] (1/4) Epoch 23, batch 450, loss[loss=0.1727, simple_loss=0.2683, pruned_loss=0.03853, over 5052.00 frames.], tot_loss[loss=0.1765, simple_loss=0.2637, pruned_loss=0.04461, over 1273747.77 frames.], batch size: 
47, lr: 7.35e-04 +2022-05-27 18:43:34,452 INFO [train.py:823] (1/4) Epoch 23, batch 500, loss[loss=0.1901, simple_loss=0.2881, pruned_loss=0.04605, over 6487.00 frames.], tot_loss[loss=0.1773, simple_loss=0.2647, pruned_loss=0.04499, over 1303175.96 frames.], batch size: 34, lr: 7.34e-04 +2022-05-27 18:44:13,786 INFO [train.py:823] (1/4) Epoch 23, batch 550, loss[loss=0.183, simple_loss=0.2724, pruned_loss=0.0468, over 7255.00 frames.], tot_loss[loss=0.1771, simple_loss=0.2652, pruned_loss=0.04456, over 1333751.16 frames.], batch size: 24, lr: 7.33e-04 +2022-05-27 18:44:52,834 INFO [train.py:823] (1/4) Epoch 23, batch 600, loss[loss=0.1646, simple_loss=0.2572, pruned_loss=0.03604, over 4728.00 frames.], tot_loss[loss=0.1772, simple_loss=0.2651, pruned_loss=0.04462, over 1349055.50 frames.], batch size: 47, lr: 7.32e-04 +2022-05-27 18:45:32,268 INFO [train.py:823] (1/4) Epoch 23, batch 650, loss[loss=0.1621, simple_loss=0.2539, pruned_loss=0.03516, over 7098.00 frames.], tot_loss[loss=0.1761, simple_loss=0.2639, pruned_loss=0.04419, over 1364012.05 frames.], batch size: 19, lr: 7.32e-04 +2022-05-27 18:46:11,140 INFO [train.py:823] (1/4) Epoch 23, batch 700, loss[loss=0.1784, simple_loss=0.263, pruned_loss=0.04691, over 7021.00 frames.], tot_loss[loss=0.1767, simple_loss=0.2645, pruned_loss=0.04444, over 1370518.97 frames.], batch size: 16, lr: 7.31e-04 +2022-05-27 18:46:50,488 INFO [train.py:823] (1/4) Epoch 23, batch 750, loss[loss=0.2047, simple_loss=0.2864, pruned_loss=0.06151, over 5035.00 frames.], tot_loss[loss=0.1763, simple_loss=0.2642, pruned_loss=0.04418, over 1376687.41 frames.], batch size: 46, lr: 7.30e-04 +2022-05-27 18:47:30,855 INFO [train.py:823] (1/4) Epoch 23, batch 800, loss[loss=0.1653, simple_loss=0.2516, pruned_loss=0.03946, over 7191.00 frames.], tot_loss[loss=0.1758, simple_loss=0.2636, pruned_loss=0.04396, over 1388941.33 frames.], batch size: 18, lr: 7.29e-04 +2022-05-27 18:48:10,090 INFO [train.py:823] (1/4) Epoch 23, batch 850, loss[loss=0.1951, simple_loss=0.2879, pruned_loss=0.05118, over 7155.00 frames.], tot_loss[loss=0.1763, simple_loss=0.2638, pruned_loss=0.04434, over 1396382.53 frames.], batch size: 23, lr: 7.28e-04 +2022-05-27 18:48:48,840 INFO [train.py:823] (1/4) Epoch 23, batch 900, loss[loss=0.1841, simple_loss=0.259, pruned_loss=0.05463, over 7039.00 frames.], tot_loss[loss=0.1744, simple_loss=0.262, pruned_loss=0.04342, over 1401594.56 frames.], batch size: 17, lr: 7.28e-04 +2022-05-27 18:49:41,152 INFO [train.py:823] (1/4) Epoch 24, batch 0, loss[loss=0.1617, simple_loss=0.2431, pruned_loss=0.0401, over 7307.00 frames.], tot_loss[loss=0.1617, simple_loss=0.2431, pruned_loss=0.0401, over 7307.00 frames.], batch size: 18, lr: 7.12e-04 +2022-05-27 18:50:19,956 INFO [train.py:823] (1/4) Epoch 24, batch 50, loss[loss=0.1372, simple_loss=0.2236, pruned_loss=0.02534, over 7153.00 frames.], tot_loss[loss=0.1718, simple_loss=0.2608, pruned_loss=0.04143, over 319039.56 frames.], batch size: 17, lr: 7.11e-04 +2022-05-27 18:51:00,449 INFO [train.py:823] (1/4) Epoch 24, batch 100, loss[loss=0.1771, simple_loss=0.2742, pruned_loss=0.04002, over 6626.00 frames.], tot_loss[loss=0.174, simple_loss=0.2625, pruned_loss=0.04279, over 560111.59 frames.], batch size: 34, lr: 7.10e-04 +2022-05-27 18:51:39,696 INFO [train.py:823] (1/4) Epoch 24, batch 150, loss[loss=0.1717, simple_loss=0.2753, pruned_loss=0.03402, over 6901.00 frames.], tot_loss[loss=0.1756, simple_loss=0.2644, pruned_loss=0.04346, over 750081.10 frames.], batch size: 29, lr: 7.10e-04 +2022-05-27 
18:52:18,807 INFO [train.py:823] (1/4) Epoch 24, batch 200, loss[loss=0.1857, simple_loss=0.2768, pruned_loss=0.04727, over 7283.00 frames.], tot_loss[loss=0.1746, simple_loss=0.2632, pruned_loss=0.04301, over 899507.83 frames.], batch size: 21, lr: 7.09e-04 +2022-05-27 18:52:58,070 INFO [train.py:823] (1/4) Epoch 24, batch 250, loss[loss=0.1548, simple_loss=0.2367, pruned_loss=0.03649, over 7292.00 frames.], tot_loss[loss=0.1745, simple_loss=0.2627, pruned_loss=0.0432, over 1014810.16 frames.], batch size: 17, lr: 7.08e-04 +2022-05-27 18:53:37,187 INFO [train.py:823] (1/4) Epoch 24, batch 300, loss[loss=0.205, simple_loss=0.2837, pruned_loss=0.06317, over 7338.00 frames.], tot_loss[loss=0.1759, simple_loss=0.2633, pruned_loss=0.04421, over 1100247.00 frames.], batch size: 23, lr: 7.07e-04 +2022-05-27 18:54:15,973 INFO [train.py:823] (1/4) Epoch 24, batch 350, loss[loss=0.1438, simple_loss=0.2315, pruned_loss=0.02806, over 7289.00 frames.], tot_loss[loss=0.1751, simple_loss=0.2625, pruned_loss=0.04387, over 1174826.60 frames.], batch size: 17, lr: 7.07e-04 +2022-05-27 18:54:55,394 INFO [train.py:823] (1/4) Epoch 24, batch 400, loss[loss=0.1952, simple_loss=0.2719, pruned_loss=0.05924, over 7337.00 frames.], tot_loss[loss=0.1749, simple_loss=0.262, pruned_loss=0.04385, over 1226272.46 frames.], batch size: 23, lr: 7.06e-04 +2022-05-27 18:55:34,350 INFO [train.py:823] (1/4) Epoch 24, batch 450, loss[loss=0.1679, simple_loss=0.2513, pruned_loss=0.04229, over 7180.00 frames.], tot_loss[loss=0.1748, simple_loss=0.2624, pruned_loss=0.0436, over 1267565.35 frames.], batch size: 18, lr: 7.05e-04 +2022-05-27 18:56:13,748 INFO [train.py:823] (1/4) Epoch 24, batch 500, loss[loss=0.1682, simple_loss=0.2647, pruned_loss=0.0359, over 7285.00 frames.], tot_loss[loss=0.1749, simple_loss=0.2629, pruned_loss=0.04351, over 1303718.01 frames.], batch size: 21, lr: 7.04e-04 +2022-05-27 18:56:52,899 INFO [train.py:823] (1/4) Epoch 24, batch 550, loss[loss=0.1623, simple_loss=0.2702, pruned_loss=0.02722, over 6503.00 frames.], tot_loss[loss=0.1744, simple_loss=0.2627, pruned_loss=0.04308, over 1327625.26 frames.], batch size: 34, lr: 7.04e-04 +2022-05-27 18:57:32,003 INFO [train.py:823] (1/4) Epoch 24, batch 600, loss[loss=0.1841, simple_loss=0.2734, pruned_loss=0.04741, over 7149.00 frames.], tot_loss[loss=0.1755, simple_loss=0.2638, pruned_loss=0.04357, over 1346385.93 frames.], batch size: 23, lr: 7.03e-04 +2022-05-27 18:58:10,819 INFO [train.py:823] (1/4) Epoch 24, batch 650, loss[loss=0.1833, simple_loss=0.2704, pruned_loss=0.04807, over 7102.00 frames.], tot_loss[loss=0.1745, simple_loss=0.2633, pruned_loss=0.04284, over 1359369.10 frames.], batch size: 19, lr: 7.02e-04 +2022-05-27 18:58:49,797 INFO [train.py:823] (1/4) Epoch 24, batch 700, loss[loss=0.1795, simple_loss=0.2717, pruned_loss=0.04363, over 7171.00 frames.], tot_loss[loss=0.1747, simple_loss=0.2636, pruned_loss=0.04296, over 1373552.19 frames.], batch size: 22, lr: 7.01e-04 +2022-05-27 18:59:29,043 INFO [train.py:823] (1/4) Epoch 24, batch 750, loss[loss=0.1813, simple_loss=0.2672, pruned_loss=0.0477, over 7113.00 frames.], tot_loss[loss=0.174, simple_loss=0.2629, pruned_loss=0.04257, over 1385772.65 frames.], batch size: 20, lr: 7.01e-04 +2022-05-27 19:00:08,808 INFO [train.py:823] (1/4) Epoch 24, batch 800, loss[loss=0.1667, simple_loss=0.2383, pruned_loss=0.04753, over 6815.00 frames.], tot_loss[loss=0.1738, simple_loss=0.2623, pruned_loss=0.04268, over 1392968.09 frames.], batch size: 15, lr: 7.00e-04 +2022-05-27 19:00:47,534 INFO 
[train.py:823] (1/4) Epoch 24, batch 850, loss[loss=0.1798, simple_loss=0.2643, pruned_loss=0.04764, over 7111.00 frames.], tot_loss[loss=0.1736, simple_loss=0.262, pruned_loss=0.04261, over 1396416.94 frames.], batch size: 20, lr: 6.99e-04 +2022-05-27 19:01:28,141 INFO [train.py:823] (1/4) Epoch 24, batch 900, loss[loss=0.18, simple_loss=0.2736, pruned_loss=0.04322, over 6525.00 frames.], tot_loss[loss=0.1739, simple_loss=0.2621, pruned_loss=0.04283, over 1398715.02 frames.], batch size: 34, lr: 6.98e-04 +2022-05-27 19:02:07,086 INFO [train.py:823] (1/4) Epoch 24, batch 950, loss[loss=0.1576, simple_loss=0.2447, pruned_loss=0.03526, over 7098.00 frames.], tot_loss[loss=0.1736, simple_loss=0.262, pruned_loss=0.04265, over 1394978.30 frames.], batch size: 18, lr: 6.98e-04 +2022-05-27 19:02:19,650 INFO [train.py:823] (1/4) Epoch 25, batch 0, loss[loss=0.1753, simple_loss=0.2691, pruned_loss=0.04077, over 7284.00 frames.], tot_loss[loss=0.1753, simple_loss=0.2691, pruned_loss=0.04077, over 7284.00 frames.], batch size: 21, lr: 6.84e-04 +2022-05-27 19:02:58,867 INFO [train.py:823] (1/4) Epoch 25, batch 50, loss[loss=0.1558, simple_loss=0.2355, pruned_loss=0.03808, over 7307.00 frames.], tot_loss[loss=0.1722, simple_loss=0.2604, pruned_loss=0.04197, over 324429.92 frames.], batch size: 17, lr: 6.83e-04 +2022-05-27 19:03:37,891 INFO [train.py:823] (1/4) Epoch 25, batch 100, loss[loss=0.1576, simple_loss=0.2339, pruned_loss=0.04063, over 7256.00 frames.], tot_loss[loss=0.1725, simple_loss=0.2605, pruned_loss=0.04222, over 564418.63 frames.], batch size: 16, lr: 6.82e-04 +2022-05-27 19:04:17,005 INFO [train.py:823] (1/4) Epoch 25, batch 150, loss[loss=0.1775, simple_loss=0.2671, pruned_loss=0.04397, over 7309.00 frames.], tot_loss[loss=0.1716, simple_loss=0.2588, pruned_loss=0.04221, over 759483.52 frames.], batch size: 22, lr: 6.82e-04 +2022-05-27 19:04:56,361 INFO [train.py:823] (1/4) Epoch 25, batch 200, loss[loss=0.1739, simple_loss=0.2599, pruned_loss=0.04391, over 7286.00 frames.], tot_loss[loss=0.1724, simple_loss=0.2598, pruned_loss=0.04253, over 911205.79 frames.], batch size: 21, lr: 6.81e-04 +2022-05-27 19:05:35,264 INFO [train.py:823] (1/4) Epoch 25, batch 250, loss[loss=0.1447, simple_loss=0.231, pruned_loss=0.02921, over 7297.00 frames.], tot_loss[loss=0.173, simple_loss=0.2606, pruned_loss=0.04274, over 1022539.54 frames.], batch size: 17, lr: 6.80e-04 +2022-05-27 19:06:14,216 INFO [train.py:823] (1/4) Epoch 25, batch 300, loss[loss=0.1648, simple_loss=0.2667, pruned_loss=0.03144, over 7292.00 frames.], tot_loss[loss=0.1722, simple_loss=0.2608, pruned_loss=0.04183, over 1116498.44 frames.], batch size: 21, lr: 6.80e-04 +2022-05-27 19:06:53,434 INFO [train.py:823] (1/4) Epoch 25, batch 350, loss[loss=0.2002, simple_loss=0.2917, pruned_loss=0.0544, over 7137.00 frames.], tot_loss[loss=0.1724, simple_loss=0.2608, pruned_loss=0.04206, over 1182458.43 frames.], batch size: 23, lr: 6.79e-04 +2022-05-27 19:07:32,594 INFO [train.py:823] (1/4) Epoch 25, batch 400, loss[loss=0.1735, simple_loss=0.2619, pruned_loss=0.04254, over 7197.00 frames.], tot_loss[loss=0.1741, simple_loss=0.2628, pruned_loss=0.04273, over 1239269.42 frames.], batch size: 25, lr: 6.78e-04 +2022-05-27 19:08:11,856 INFO [train.py:823] (1/4) Epoch 25, batch 450, loss[loss=0.1572, simple_loss=0.2387, pruned_loss=0.03785, over 6807.00 frames.], tot_loss[loss=0.1749, simple_loss=0.2635, pruned_loss=0.04316, over 1269806.50 frames.], batch size: 15, lr: 6.77e-04 +2022-05-27 19:08:50,542 INFO [train.py:823] (1/4) Epoch 25, 
batch 500, loss[loss=0.1352, simple_loss=0.2123, pruned_loss=0.02899, over 7012.00 frames.], tot_loss[loss=0.1743, simple_loss=0.2629, pruned_loss=0.04282, over 1303213.10 frames.], batch size: 16, lr: 6.77e-04 +2022-05-27 19:09:29,767 INFO [train.py:823] (1/4) Epoch 25, batch 550, loss[loss=0.1647, simple_loss=0.2643, pruned_loss=0.03255, over 7181.00 frames.], tot_loss[loss=0.1736, simple_loss=0.262, pruned_loss=0.04259, over 1331015.13 frames.], batch size: 21, lr: 6.76e-04 +2022-05-27 19:10:08,984 INFO [train.py:823] (1/4) Epoch 25, batch 600, loss[loss=0.1778, simple_loss=0.2779, pruned_loss=0.03884, over 7284.00 frames.], tot_loss[loss=0.1729, simple_loss=0.2611, pruned_loss=0.04241, over 1344751.41 frames.], batch size: 21, lr: 6.75e-04 +2022-05-27 19:10:49,696 INFO [train.py:823] (1/4) Epoch 25, batch 650, loss[loss=0.1614, simple_loss=0.256, pruned_loss=0.03335, over 7274.00 frames.], tot_loss[loss=0.1724, simple_loss=0.2609, pruned_loss=0.042, over 1359229.26 frames.], batch size: 20, lr: 6.75e-04 +2022-05-27 19:11:28,949 INFO [train.py:823] (1/4) Epoch 25, batch 700, loss[loss=0.1492, simple_loss=0.2363, pruned_loss=0.03106, over 7139.00 frames.], tot_loss[loss=0.1722, simple_loss=0.2609, pruned_loss=0.04179, over 1370763.22 frames.], batch size: 17, lr: 6.74e-04 +2022-05-27 19:12:08,293 INFO [train.py:823] (1/4) Epoch 25, batch 750, loss[loss=0.166, simple_loss=0.2529, pruned_loss=0.03958, over 7386.00 frames.], tot_loss[loss=0.1717, simple_loss=0.26, pruned_loss=0.04174, over 1378302.14 frames.], batch size: 20, lr: 6.73e-04 +2022-05-27 19:12:47,491 INFO [train.py:823] (1/4) Epoch 25, batch 800, loss[loss=0.1842, simple_loss=0.2744, pruned_loss=0.04696, over 7190.00 frames.], tot_loss[loss=0.1712, simple_loss=0.2595, pruned_loss=0.04146, over 1389613.23 frames.], batch size: 21, lr: 6.73e-04 +2022-05-27 19:13:27,810 INFO [train.py:823] (1/4) Epoch 25, batch 850, loss[loss=0.1506, simple_loss=0.2408, pruned_loss=0.03016, over 7188.00 frames.], tot_loss[loss=0.1724, simple_loss=0.2609, pruned_loss=0.04199, over 1395483.38 frames.], batch size: 18, lr: 6.72e-04 +2022-05-27 19:14:08,749 INFO [train.py:823] (1/4) Epoch 25, batch 900, loss[loss=0.1871, simple_loss=0.2831, pruned_loss=0.04558, over 6413.00 frames.], tot_loss[loss=0.1725, simple_loss=0.2612, pruned_loss=0.04193, over 1394531.05 frames.], batch size: 34, lr: 6.71e-04 +2022-05-27 19:14:59,579 INFO [train.py:823] (1/4) Epoch 26, batch 0, loss[loss=0.157, simple_loss=0.2363, pruned_loss=0.03881, over 7294.00 frames.], tot_loss[loss=0.157, simple_loss=0.2363, pruned_loss=0.03881, over 7294.00 frames.], batch size: 18, lr: 6.58e-04 +2022-05-27 19:15:38,644 INFO [train.py:823] (1/4) Epoch 26, batch 50, loss[loss=0.1819, simple_loss=0.2678, pruned_loss=0.04806, over 7372.00 frames.], tot_loss[loss=0.1684, simple_loss=0.2576, pruned_loss=0.0396, over 323615.13 frames.], batch size: 20, lr: 6.57e-04 +2022-05-27 19:16:17,946 INFO [train.py:823] (1/4) Epoch 26, batch 100, loss[loss=0.2088, simple_loss=0.2977, pruned_loss=0.05992, over 7223.00 frames.], tot_loss[loss=0.1725, simple_loss=0.2609, pruned_loss=0.04211, over 568708.99 frames.], batch size: 25, lr: 6.56e-04 +2022-05-27 19:16:57,047 INFO [train.py:823] (1/4) Epoch 26, batch 150, loss[loss=0.1728, simple_loss=0.263, pruned_loss=0.04131, over 7173.00 frames.], tot_loss[loss=0.1728, simple_loss=0.2604, pruned_loss=0.04257, over 755171.50 frames.], batch size: 25, lr: 6.56e-04 +2022-05-27 19:17:36,088 INFO [train.py:823] (1/4) Epoch 26, batch 200, loss[loss=0.1621, 
simple_loss=0.2546, pruned_loss=0.03479, over 7097.00 frames.], tot_loss[loss=0.1737, simple_loss=0.2616, pruned_loss=0.04285, over 901378.70 frames.], batch size: 18, lr: 6.55e-04 +2022-05-27 19:18:15,498 INFO [train.py:823] (1/4) Epoch 26, batch 250, loss[loss=0.1762, simple_loss=0.2736, pruned_loss=0.03941, over 7419.00 frames.], tot_loss[loss=0.1724, simple_loss=0.2604, pruned_loss=0.04217, over 1016961.31 frames.], batch size: 22, lr: 6.55e-04 +2022-05-27 19:18:59,347 INFO [train.py:823] (1/4) Epoch 26, batch 300, loss[loss=0.1606, simple_loss=0.2599, pruned_loss=0.0306, over 7117.00 frames.], tot_loss[loss=0.1717, simple_loss=0.26, pruned_loss=0.04175, over 1107512.99 frames.], batch size: 20, lr: 6.54e-04 +2022-05-27 19:19:38,150 INFO [train.py:823] (1/4) Epoch 26, batch 350, loss[loss=0.209, simple_loss=0.2859, pruned_loss=0.06604, over 6481.00 frames.], tot_loss[loss=0.1721, simple_loss=0.2601, pruned_loss=0.04206, over 1178699.74 frames.], batch size: 34, lr: 6.53e-04 +2022-05-27 19:20:17,202 INFO [train.py:823] (1/4) Epoch 26, batch 400, loss[loss=0.1782, simple_loss=0.2734, pruned_loss=0.04152, over 7156.00 frames.], tot_loss[loss=0.1715, simple_loss=0.2595, pruned_loss=0.04176, over 1234466.99 frames.], batch size: 23, lr: 6.53e-04 +2022-05-27 19:20:55,946 INFO [train.py:823] (1/4) Epoch 26, batch 450, loss[loss=0.1794, simple_loss=0.2631, pruned_loss=0.04782, over 7192.00 frames.], tot_loss[loss=0.1721, simple_loss=0.2601, pruned_loss=0.04207, over 1274079.77 frames.], batch size: 21, lr: 6.52e-04 +2022-05-27 19:21:34,730 INFO [train.py:823] (1/4) Epoch 26, batch 500, loss[loss=0.1784, simple_loss=0.2749, pruned_loss=0.04096, over 7042.00 frames.], tot_loss[loss=0.1727, simple_loss=0.2611, pruned_loss=0.0421, over 1304984.66 frames.], batch size: 26, lr: 6.51e-04 +2022-05-27 19:22:13,813 INFO [train.py:823] (1/4) Epoch 26, batch 550, loss[loss=0.1369, simple_loss=0.2183, pruned_loss=0.02771, over 7008.00 frames.], tot_loss[loss=0.1726, simple_loss=0.2608, pruned_loss=0.04218, over 1328407.74 frames.], batch size: 16, lr: 6.51e-04 +2022-05-27 19:22:52,192 INFO [train.py:823] (1/4) Epoch 26, batch 600, loss[loss=0.1627, simple_loss=0.2596, pruned_loss=0.03291, over 7310.00 frames.], tot_loss[loss=0.1722, simple_loss=0.2602, pruned_loss=0.04207, over 1347898.44 frames.], batch size: 22, lr: 6.50e-04 +2022-05-27 19:23:30,987 INFO [train.py:823] (1/4) Epoch 26, batch 650, loss[loss=0.1902, simple_loss=0.2804, pruned_loss=0.05006, over 7335.00 frames.], tot_loss[loss=0.1711, simple_loss=0.2589, pruned_loss=0.04161, over 1359083.17 frames.], batch size: 23, lr: 6.49e-04 +2022-05-27 19:24:10,339 INFO [train.py:823] (1/4) Epoch 26, batch 700, loss[loss=0.1856, simple_loss=0.2734, pruned_loss=0.04893, over 7032.00 frames.], tot_loss[loss=0.1716, simple_loss=0.2597, pruned_loss=0.0417, over 1372551.33 frames.], batch size: 26, lr: 6.49e-04 +2022-05-27 19:24:49,310 INFO [train.py:823] (1/4) Epoch 26, batch 750, loss[loss=0.1936, simple_loss=0.2711, pruned_loss=0.05812, over 7295.00 frames.], tot_loss[loss=0.1724, simple_loss=0.2604, pruned_loss=0.04223, over 1375478.07 frames.], batch size: 19, lr: 6.48e-04 +2022-05-27 19:25:29,463 INFO [train.py:823] (1/4) Epoch 26, batch 800, loss[loss=0.1314, simple_loss=0.2161, pruned_loss=0.02334, over 6838.00 frames.], tot_loss[loss=0.1723, simple_loss=0.26, pruned_loss=0.04229, over 1382551.86 frames.], batch size: 15, lr: 6.47e-04 +2022-05-27 19:26:08,495 INFO [train.py:823] (1/4) Epoch 26, batch 850, loss[loss=0.1671, simple_loss=0.2433, 
pruned_loss=0.04548, over 6834.00 frames.], tot_loss[loss=0.1723, simple_loss=0.2599, pruned_loss=0.04236, over 1393870.39 frames.], batch size: 15, lr: 6.47e-04 +2022-05-27 19:26:47,856 INFO [train.py:823] (1/4) Epoch 26, batch 900, loss[loss=0.1351, simple_loss=0.2139, pruned_loss=0.02812, over 7027.00 frames.], tot_loss[loss=0.1726, simple_loss=0.2607, pruned_loss=0.04229, over 1395412.92 frames.], batch size: 17, lr: 6.46e-04 +2022-05-27 19:27:39,217 INFO [train.py:823] (1/4) Epoch 27, batch 0, loss[loss=0.1554, simple_loss=0.239, pruned_loss=0.03586, over 7200.00 frames.], tot_loss[loss=0.1554, simple_loss=0.239, pruned_loss=0.03586, over 7200.00 frames.], batch size: 18, lr: 6.34e-04 +2022-05-27 19:28:18,586 INFO [train.py:823] (1/4) Epoch 27, batch 50, loss[loss=0.1431, simple_loss=0.2284, pruned_loss=0.02887, over 7197.00 frames.], tot_loss[loss=0.1691, simple_loss=0.2583, pruned_loss=0.03996, over 322164.09 frames.], batch size: 18, lr: 6.33e-04 +2022-05-27 19:28:57,570 INFO [train.py:823] (1/4) Epoch 27, batch 100, loss[loss=0.1888, simple_loss=0.2868, pruned_loss=0.04535, over 7199.00 frames.], tot_loss[loss=0.1701, simple_loss=0.2596, pruned_loss=0.04029, over 563833.12 frames.], batch size: 25, lr: 6.32e-04 +2022-05-27 19:29:36,474 INFO [train.py:823] (1/4) Epoch 27, batch 150, loss[loss=0.1584, simple_loss=0.2383, pruned_loss=0.03922, over 7310.00 frames.], tot_loss[loss=0.1708, simple_loss=0.2594, pruned_loss=0.04106, over 753343.49 frames.], batch size: 18, lr: 6.32e-04 +2022-05-27 19:30:15,604 INFO [train.py:823] (1/4) Epoch 27, batch 200, loss[loss=0.1714, simple_loss=0.2627, pruned_loss=0.04008, over 7427.00 frames.], tot_loss[loss=0.1722, simple_loss=0.2602, pruned_loss=0.0421, over 900366.66 frames.], batch size: 22, lr: 6.31e-04 +2022-05-27 19:30:54,945 INFO [train.py:823] (1/4) Epoch 27, batch 250, loss[loss=0.1592, simple_loss=0.2438, pruned_loss=0.03728, over 7026.00 frames.], tot_loss[loss=0.1713, simple_loss=0.2606, pruned_loss=0.04103, over 1012676.86 frames.], batch size: 17, lr: 6.31e-04 +2022-05-27 19:31:34,001 INFO [train.py:823] (1/4) Epoch 27, batch 300, loss[loss=0.1463, simple_loss=0.2384, pruned_loss=0.02707, over 7385.00 frames.], tot_loss[loss=0.1699, simple_loss=0.2586, pruned_loss=0.04063, over 1106914.27 frames.], batch size: 21, lr: 6.30e-04 +2022-05-27 19:32:13,810 INFO [train.py:823] (1/4) Epoch 27, batch 350, loss[loss=0.1596, simple_loss=0.2504, pruned_loss=0.03434, over 7294.00 frames.], tot_loss[loss=0.1694, simple_loss=0.2582, pruned_loss=0.04032, over 1178611.34 frames.], batch size: 19, lr: 6.29e-04 +2022-05-27 19:32:52,588 INFO [train.py:823] (1/4) Epoch 27, batch 400, loss[loss=0.1603, simple_loss=0.2514, pruned_loss=0.03458, over 7278.00 frames.], tot_loss[loss=0.1699, simple_loss=0.2587, pruned_loss=0.04052, over 1233213.84 frames.], batch size: 20, lr: 6.29e-04 +2022-05-27 19:33:33,771 INFO [train.py:823] (1/4) Epoch 27, batch 450, loss[loss=0.1828, simple_loss=0.2636, pruned_loss=0.05101, over 4919.00 frames.], tot_loss[loss=0.1697, simple_loss=0.2586, pruned_loss=0.04045, over 1276488.96 frames.], batch size: 46, lr: 6.28e-04 +2022-05-27 19:34:12,451 INFO [train.py:823] (1/4) Epoch 27, batch 500, loss[loss=0.2003, simple_loss=0.2745, pruned_loss=0.06302, over 7153.00 frames.], tot_loss[loss=0.1695, simple_loss=0.2582, pruned_loss=0.04038, over 1300939.57 frames.], batch size: 23, lr: 6.28e-04 +2022-05-27 19:34:52,010 INFO [train.py:823] (1/4) Epoch 27, batch 550, loss[loss=0.1547, simple_loss=0.2516, pruned_loss=0.02887, over 
7279.00 frames.], tot_loss[loss=0.1702, simple_loss=0.259, pruned_loss=0.04064, over 1329357.97 frames.], batch size: 20, lr: 6.27e-04 +2022-05-27 19:35:30,722 INFO [train.py:823] (1/4) Epoch 27, batch 600, loss[loss=0.1463, simple_loss=0.2292, pruned_loss=0.03164, over 7308.00 frames.], tot_loss[loss=0.1709, simple_loss=0.2595, pruned_loss=0.04116, over 1355642.22 frames.], batch size: 18, lr: 6.26e-04 +2022-05-27 19:36:10,981 INFO [train.py:823] (1/4) Epoch 27, batch 650, loss[loss=0.1561, simple_loss=0.2401, pruned_loss=0.03607, over 7192.00 frames.], tot_loss[loss=0.1708, simple_loss=0.2597, pruned_loss=0.04097, over 1374110.36 frames.], batch size: 19, lr: 6.26e-04 +2022-05-27 19:36:51,524 INFO [train.py:823] (1/4) Epoch 27, batch 700, loss[loss=0.1722, simple_loss=0.2783, pruned_loss=0.03301, over 7371.00 frames.], tot_loss[loss=0.1707, simple_loss=0.2595, pruned_loss=0.04093, over 1383805.53 frames.], batch size: 21, lr: 6.25e-04 +2022-05-27 19:37:31,035 INFO [train.py:823] (1/4) Epoch 27, batch 750, loss[loss=0.1751, simple_loss=0.254, pruned_loss=0.04807, over 7197.00 frames.], tot_loss[loss=0.1707, simple_loss=0.2592, pruned_loss=0.04106, over 1392459.90 frames.], batch size: 19, lr: 6.25e-04 +2022-05-27 19:38:09,900 INFO [train.py:823] (1/4) Epoch 27, batch 800, loss[loss=0.2039, simple_loss=0.2893, pruned_loss=0.05927, over 7164.00 frames.], tot_loss[loss=0.1717, simple_loss=0.26, pruned_loss=0.04175, over 1393285.32 frames.], batch size: 23, lr: 6.24e-04 +2022-05-27 19:38:49,277 INFO [train.py:823] (1/4) Epoch 27, batch 850, loss[loss=0.1621, simple_loss=0.2678, pruned_loss=0.0282, over 7105.00 frames.], tot_loss[loss=0.1707, simple_loss=0.2587, pruned_loss=0.04138, over 1395927.18 frames.], batch size: 20, lr: 6.23e-04 +2022-05-27 19:39:28,591 INFO [train.py:823] (1/4) Epoch 27, batch 900, loss[loss=0.1623, simple_loss=0.2362, pruned_loss=0.04423, over 7290.00 frames.], tot_loss[loss=0.1702, simple_loss=0.2583, pruned_loss=0.04111, over 1398403.82 frames.], batch size: 17, lr: 6.23e-04 +2022-05-27 19:40:22,681 INFO [train.py:823] (1/4) Epoch 28, batch 0, loss[loss=0.1818, simple_loss=0.278, pruned_loss=0.04282, over 7195.00 frames.], tot_loss[loss=0.1818, simple_loss=0.278, pruned_loss=0.04282, over 7195.00 frames.], batch size: 20, lr: 6.11e-04 +2022-05-27 19:41:02,199 INFO [train.py:823] (1/4) Epoch 28, batch 50, loss[loss=0.1658, simple_loss=0.2614, pruned_loss=0.03512, over 7105.00 frames.], tot_loss[loss=0.1656, simple_loss=0.2534, pruned_loss=0.03896, over 316042.98 frames.], batch size: 20, lr: 6.11e-04 +2022-05-27 19:41:41,842 INFO [train.py:823] (1/4) Epoch 28, batch 100, loss[loss=0.1759, simple_loss=0.264, pruned_loss=0.04392, over 7015.00 frames.], tot_loss[loss=0.1662, simple_loss=0.255, pruned_loss=0.03874, over 560482.98 frames.], batch size: 26, lr: 6.10e-04 +2022-05-27 19:42:21,124 INFO [train.py:823] (1/4) Epoch 28, batch 150, loss[loss=0.2055, simple_loss=0.2876, pruned_loss=0.06171, over 4952.00 frames.], tot_loss[loss=0.1677, simple_loss=0.256, pruned_loss=0.03968, over 748868.88 frames.], batch size: 46, lr: 6.09e-04 +2022-05-27 19:43:00,525 INFO [train.py:823] (1/4) Epoch 28, batch 200, loss[loss=0.1825, simple_loss=0.2718, pruned_loss=0.04657, over 7193.00 frames.], tot_loss[loss=0.1667, simple_loss=0.255, pruned_loss=0.03924, over 898760.56 frames.], batch size: 20, lr: 6.09e-04 +2022-05-27 19:43:39,764 INFO [train.py:823] (1/4) Epoch 28, batch 250, loss[loss=0.19, simple_loss=0.2902, pruned_loss=0.0449, over 7340.00 frames.], tot_loss[loss=0.1672, 
simple_loss=0.2557, pruned_loss=0.03929, over 1015232.31 frames.], batch size: 23, lr: 6.08e-04 +2022-05-27 19:44:19,094 INFO [train.py:823] (1/4) Epoch 28, batch 300, loss[loss=0.2047, simple_loss=0.293, pruned_loss=0.05814, over 6981.00 frames.], tot_loss[loss=0.1685, simple_loss=0.2571, pruned_loss=0.04, over 1103615.78 frames.], batch size: 29, lr: 6.08e-04 +2022-05-27 19:44:58,539 INFO [train.py:823] (1/4) Epoch 28, batch 350, loss[loss=0.2043, simple_loss=0.2935, pruned_loss=0.05757, over 7343.00 frames.], tot_loss[loss=0.1679, simple_loss=0.2564, pruned_loss=0.03967, over 1174853.01 frames.], batch size: 23, lr: 6.07e-04 +2022-05-27 19:45:37,877 INFO [train.py:823] (1/4) Epoch 28, batch 400, loss[loss=0.1849, simple_loss=0.2782, pruned_loss=0.04575, over 7285.00 frames.], tot_loss[loss=0.1677, simple_loss=0.2562, pruned_loss=0.03963, over 1230168.73 frames.], batch size: 21, lr: 6.07e-04 +2022-05-27 19:46:16,835 INFO [train.py:823] (1/4) Epoch 28, batch 450, loss[loss=0.1424, simple_loss=0.2372, pruned_loss=0.02377, over 6919.00 frames.], tot_loss[loss=0.167, simple_loss=0.2558, pruned_loss=0.03914, over 1270022.25 frames.], batch size: 29, lr: 6.06e-04 +2022-05-27 19:46:56,129 INFO [train.py:823] (1/4) Epoch 28, batch 500, loss[loss=0.1884, simple_loss=0.2686, pruned_loss=0.0541, over 6888.00 frames.], tot_loss[loss=0.1676, simple_loss=0.2563, pruned_loss=0.03948, over 1307104.60 frames.], batch size: 29, lr: 6.06e-04 +2022-05-27 19:47:35,319 INFO [train.py:823] (1/4) Epoch 28, batch 550, loss[loss=0.1466, simple_loss=0.251, pruned_loss=0.02107, over 7110.00 frames.], tot_loss[loss=0.1678, simple_loss=0.2568, pruned_loss=0.03941, over 1330486.92 frames.], batch size: 20, lr: 6.05e-04 +2022-05-27 19:48:14,479 INFO [train.py:823] (1/4) Epoch 28, batch 600, loss[loss=0.1596, simple_loss=0.2525, pruned_loss=0.03335, over 7199.00 frames.], tot_loss[loss=0.168, simple_loss=0.2574, pruned_loss=0.03932, over 1348742.42 frames.], batch size: 19, lr: 6.04e-04 +2022-05-27 19:48:53,682 INFO [train.py:823] (1/4) Epoch 28, batch 650, loss[loss=0.1581, simple_loss=0.246, pruned_loss=0.0351, over 7291.00 frames.], tot_loss[loss=0.1683, simple_loss=0.2572, pruned_loss=0.03973, over 1367977.07 frames.], batch size: 19, lr: 6.04e-04 +2022-05-27 19:49:34,148 INFO [train.py:823] (1/4) Epoch 28, batch 700, loss[loss=0.1585, simple_loss=0.2336, pruned_loss=0.04169, over 7319.00 frames.], tot_loss[loss=0.1689, simple_loss=0.2577, pruned_loss=0.04003, over 1377176.08 frames.], batch size: 18, lr: 6.03e-04 +2022-05-27 19:50:13,239 INFO [train.py:823] (1/4) Epoch 28, batch 750, loss[loss=0.1892, simple_loss=0.2709, pruned_loss=0.05374, over 4781.00 frames.], tot_loss[loss=0.1687, simple_loss=0.2578, pruned_loss=0.03979, over 1383920.30 frames.], batch size: 47, lr: 6.03e-04 +2022-05-27 19:50:52,557 INFO [train.py:823] (1/4) Epoch 28, batch 800, loss[loss=0.154, simple_loss=0.238, pruned_loss=0.03507, over 7011.00 frames.], tot_loss[loss=0.1687, simple_loss=0.2578, pruned_loss=0.03987, over 1395314.84 frames.], batch size: 16, lr: 6.02e-04 +2022-05-27 19:51:31,400 INFO [train.py:823] (1/4) Epoch 28, batch 850, loss[loss=0.1571, simple_loss=0.2595, pruned_loss=0.02736, over 7371.00 frames.], tot_loss[loss=0.1693, simple_loss=0.2584, pruned_loss=0.0401, over 1399590.19 frames.], batch size: 21, lr: 6.02e-04 +2022-05-27 19:52:10,735 INFO [train.py:823] (1/4) Epoch 28, batch 900, loss[loss=0.1587, simple_loss=0.241, pruned_loss=0.03815, over 7361.00 frames.], tot_loss[loss=0.1693, simple_loss=0.2582, 
pruned_loss=0.0402, over 1401536.03 frames.], batch size: 21, lr: 6.01e-04 +2022-05-27 19:53:03,342 INFO [train.py:823] (1/4) Epoch 29, batch 0, loss[loss=0.18, simple_loss=0.2646, pruned_loss=0.04775, over 6991.00 frames.], tot_loss[loss=0.18, simple_loss=0.2646, pruned_loss=0.04775, over 6991.00 frames.], batch size: 26, lr: 5.90e-04 +2022-05-27 19:53:42,701 INFO [train.py:823] (1/4) Epoch 29, batch 50, loss[loss=0.1577, simple_loss=0.249, pruned_loss=0.03322, over 7290.00 frames.], tot_loss[loss=0.1667, simple_loss=0.254, pruned_loss=0.0397, over 321614.85 frames.], batch size: 21, lr: 5.90e-04 +2022-05-27 19:54:22,196 INFO [train.py:823] (1/4) Epoch 29, batch 100, loss[loss=0.1638, simple_loss=0.2607, pruned_loss=0.03349, over 7218.00 frames.], tot_loss[loss=0.1675, simple_loss=0.2562, pruned_loss=0.03944, over 570173.58 frames.], batch size: 24, lr: 5.89e-04 +2022-05-27 19:55:01,922 INFO [train.py:823] (1/4) Epoch 29, batch 150, loss[loss=0.1525, simple_loss=0.2382, pruned_loss=0.03341, over 7296.00 frames.], tot_loss[loss=0.1686, simple_loss=0.2571, pruned_loss=0.04006, over 760350.01 frames.], batch size: 19, lr: 5.89e-04 +2022-05-27 19:55:40,945 INFO [train.py:823] (1/4) Epoch 29, batch 200, loss[loss=0.1886, simple_loss=0.2808, pruned_loss=0.0482, over 7333.00 frames.], tot_loss[loss=0.1687, simple_loss=0.258, pruned_loss=0.03973, over 900377.09 frames.], batch size: 23, lr: 5.88e-04 +2022-05-27 19:56:21,511 INFO [train.py:823] (1/4) Epoch 29, batch 250, loss[loss=0.1822, simple_loss=0.267, pruned_loss=0.04873, over 7385.00 frames.], tot_loss[loss=0.1668, simple_loss=0.2559, pruned_loss=0.03886, over 1016050.78 frames.], batch size: 19, lr: 5.88e-04 +2022-05-27 19:57:00,569 INFO [train.py:823] (1/4) Epoch 29, batch 300, loss[loss=0.1721, simple_loss=0.2582, pruned_loss=0.04302, over 7264.00 frames.], tot_loss[loss=0.1667, simple_loss=0.2556, pruned_loss=0.03886, over 1105419.52 frames.], batch size: 20, lr: 5.87e-04 +2022-05-27 19:57:40,046 INFO [train.py:823] (1/4) Epoch 29, batch 350, loss[loss=0.1563, simple_loss=0.236, pruned_loss=0.03824, over 6836.00 frames.], tot_loss[loss=0.1673, simple_loss=0.2562, pruned_loss=0.03918, over 1173334.95 frames.], batch size: 15, lr: 5.87e-04 +2022-05-27 19:58:19,096 INFO [train.py:823] (1/4) Epoch 29, batch 400, loss[loss=0.1334, simple_loss=0.2181, pruned_loss=0.02439, over 7298.00 frames.], tot_loss[loss=0.168, simple_loss=0.2572, pruned_loss=0.03935, over 1229024.77 frames.], batch size: 17, lr: 5.86e-04 +2022-05-27 19:58:59,810 INFO [train.py:823] (1/4) Epoch 29, batch 450, loss[loss=0.1394, simple_loss=0.2276, pruned_loss=0.02564, over 7098.00 frames.], tot_loss[loss=0.1672, simple_loss=0.2561, pruned_loss=0.0392, over 1269611.28 frames.], batch size: 18, lr: 5.85e-04 +2022-05-27 19:59:40,114 INFO [train.py:823] (1/4) Epoch 29, batch 500, loss[loss=0.1644, simple_loss=0.2607, pruned_loss=0.03407, over 7112.00 frames.], tot_loss[loss=0.1675, simple_loss=0.2561, pruned_loss=0.03938, over 1297325.97 frames.], batch size: 20, lr: 5.85e-04 +2022-05-27 20:00:19,254 INFO [train.py:823] (1/4) Epoch 29, batch 550, loss[loss=0.1962, simple_loss=0.2831, pruned_loss=0.05465, over 6513.00 frames.], tot_loss[loss=0.1669, simple_loss=0.2556, pruned_loss=0.03905, over 1326624.61 frames.], batch size: 34, lr: 5.84e-04 +2022-05-27 20:00:58,214 INFO [train.py:823] (1/4) Epoch 29, batch 600, loss[loss=0.1649, simple_loss=0.2524, pruned_loss=0.03875, over 6280.00 frames.], tot_loss[loss=0.1678, simple_loss=0.2564, pruned_loss=0.03957, over 1346807.57 
frames.], batch size: 34, lr: 5.84e-04 +2022-05-27 20:01:37,807 INFO [train.py:823] (1/4) Epoch 29, batch 650, loss[loss=0.1724, simple_loss=0.2769, pruned_loss=0.0339, over 7363.00 frames.], tot_loss[loss=0.1689, simple_loss=0.2573, pruned_loss=0.04028, over 1363736.47 frames.], batch size: 20, lr: 5.83e-04 +2022-05-27 20:02:16,353 INFO [train.py:823] (1/4) Epoch 29, batch 700, loss[loss=0.1565, simple_loss=0.2433, pruned_loss=0.03487, over 7187.00 frames.], tot_loss[loss=0.1679, simple_loss=0.2564, pruned_loss=0.03968, over 1371546.54 frames.], batch size: 19, lr: 5.83e-04 +2022-05-27 20:02:55,464 INFO [train.py:823] (1/4) Epoch 29, batch 750, loss[loss=0.1924, simple_loss=0.2741, pruned_loss=0.05539, over 5194.00 frames.], tot_loss[loss=0.168, simple_loss=0.2567, pruned_loss=0.03966, over 1379889.50 frames.], batch size: 46, lr: 5.82e-04 +2022-05-27 20:03:34,140 INFO [train.py:823] (1/4) Epoch 29, batch 800, loss[loss=0.1463, simple_loss=0.2266, pruned_loss=0.03294, over 7194.00 frames.], tot_loss[loss=0.1672, simple_loss=0.2559, pruned_loss=0.0392, over 1387165.44 frames.], batch size: 18, lr: 5.82e-04 +2022-05-27 20:04:13,234 INFO [train.py:823] (1/4) Epoch 29, batch 850, loss[loss=0.1668, simple_loss=0.2618, pruned_loss=0.03586, over 7213.00 frames.], tot_loss[loss=0.1669, simple_loss=0.2562, pruned_loss=0.03878, over 1397147.17 frames.], batch size: 24, lr: 5.81e-04 +2022-05-27 20:04:52,057 INFO [train.py:823] (1/4) Epoch 29, batch 900, loss[loss=0.1883, simple_loss=0.2887, pruned_loss=0.04393, over 7174.00 frames.], tot_loss[loss=0.167, simple_loss=0.2566, pruned_loss=0.03875, over 1395905.77 frames.], batch size: 22, lr: 5.81e-04 +2022-05-27 20:05:30,820 INFO [train.py:823] (1/4) Epoch 29, batch 950, loss[loss=0.2144, simple_loss=0.3012, pruned_loss=0.06385, over 4601.00 frames.], tot_loss[loss=0.1668, simple_loss=0.2564, pruned_loss=0.03856, over 1389768.49 frames.], batch size: 46, lr: 5.80e-04 +2022-05-27 20:05:46,286 INFO [train.py:823] (1/4) Epoch 30, batch 0, loss[loss=0.1612, simple_loss=0.2481, pruned_loss=0.03719, over 7371.00 frames.], tot_loss[loss=0.1612, simple_loss=0.2481, pruned_loss=0.03719, over 7371.00 frames.], batch size: 20, lr: 5.71e-04 +2022-05-27 20:06:25,427 INFO [train.py:823] (1/4) Epoch 30, batch 50, loss[loss=0.1526, simple_loss=0.2472, pruned_loss=0.02906, over 7091.00 frames.], tot_loss[loss=0.1643, simple_loss=0.2531, pruned_loss=0.03774, over 315254.47 frames.], batch size: 19, lr: 5.70e-04 +2022-05-27 20:07:04,812 INFO [train.py:823] (1/4) Epoch 30, batch 100, loss[loss=0.1597, simple_loss=0.2325, pruned_loss=0.04343, over 7298.00 frames.], tot_loss[loss=0.1646, simple_loss=0.2535, pruned_loss=0.0378, over 561893.02 frames.], batch size: 17, lr: 5.70e-04 +2022-05-27 20:07:43,662 INFO [train.py:823] (1/4) Epoch 30, batch 150, loss[loss=0.1854, simple_loss=0.2746, pruned_loss=0.04807, over 7172.00 frames.], tot_loss[loss=0.1679, simple_loss=0.2567, pruned_loss=0.03955, over 753969.28 frames.], batch size: 23, lr: 5.69e-04 +2022-05-27 20:08:23,061 INFO [train.py:823] (1/4) Epoch 30, batch 200, loss[loss=0.1787, simple_loss=0.2648, pruned_loss=0.04625, over 7151.00 frames.], tot_loss[loss=0.1675, simple_loss=0.2564, pruned_loss=0.03927, over 901728.81 frames.], batch size: 23, lr: 5.69e-04 +2022-05-27 20:09:02,236 INFO [train.py:823] (1/4) Epoch 30, batch 250, loss[loss=0.1591, simple_loss=0.2497, pruned_loss=0.03424, over 7107.00 frames.], tot_loss[loss=0.1681, simple_loss=0.2569, pruned_loss=0.03962, over 1013849.69 frames.], batch size: 19, lr: 
5.68e-04 +2022-05-27 20:09:41,418 INFO [train.py:823] (1/4) Epoch 30, batch 300, loss[loss=0.1454, simple_loss=0.2315, pruned_loss=0.02966, over 7152.00 frames.], tot_loss[loss=0.1688, simple_loss=0.2577, pruned_loss=0.03995, over 1107294.18 frames.], batch size: 17, lr: 5.68e-04 +2022-05-27 20:10:20,387 INFO [train.py:823] (1/4) Epoch 30, batch 350, loss[loss=0.1769, simple_loss=0.2639, pruned_loss=0.04491, over 7222.00 frames.], tot_loss[loss=0.1678, simple_loss=0.2564, pruned_loss=0.03964, over 1177324.74 frames.], batch size: 24, lr: 5.67e-04 +2022-05-27 20:10:59,226 INFO [train.py:823] (1/4) Epoch 30, batch 400, loss[loss=0.182, simple_loss=0.2817, pruned_loss=0.04117, over 7011.00 frames.], tot_loss[loss=0.1675, simple_loss=0.2563, pruned_loss=0.03937, over 1231396.43 frames.], batch size: 26, lr: 5.67e-04 +2022-05-27 20:11:38,360 INFO [train.py:823] (1/4) Epoch 30, batch 450, loss[loss=0.1865, simple_loss=0.272, pruned_loss=0.0505, over 6950.00 frames.], tot_loss[loss=0.1672, simple_loss=0.256, pruned_loss=0.03919, over 1270014.48 frames.], batch size: 29, lr: 5.66e-04 +2022-05-27 20:12:17,440 INFO [train.py:823] (1/4) Epoch 30, batch 500, loss[loss=0.1577, simple_loss=0.2494, pruned_loss=0.03301, over 7101.00 frames.], tot_loss[loss=0.1666, simple_loss=0.2553, pruned_loss=0.03891, over 1303479.35 frames.], batch size: 19, lr: 5.66e-04 +2022-05-27 20:12:56,728 INFO [train.py:823] (1/4) Epoch 30, batch 550, loss[loss=0.1746, simple_loss=0.2692, pruned_loss=0.03999, over 7416.00 frames.], tot_loss[loss=0.1668, simple_loss=0.2555, pruned_loss=0.03909, over 1327013.04 frames.], batch size: 22, lr: 5.65e-04 +2022-05-27 20:13:37,207 INFO [train.py:823] (1/4) Epoch 30, batch 600, loss[loss=0.1583, simple_loss=0.2435, pruned_loss=0.03655, over 7199.00 frames.], tot_loss[loss=0.1669, simple_loss=0.2555, pruned_loss=0.03914, over 1345070.47 frames.], batch size: 19, lr: 5.65e-04 +2022-05-27 20:14:16,353 INFO [train.py:823] (1/4) Epoch 30, batch 650, loss[loss=0.155, simple_loss=0.2534, pruned_loss=0.02834, over 7418.00 frames.], tot_loss[loss=0.1663, simple_loss=0.2549, pruned_loss=0.03891, over 1358743.48 frames.], batch size: 22, lr: 5.64e-04 +2022-05-27 20:14:55,818 INFO [train.py:823] (1/4) Epoch 30, batch 700, loss[loss=0.1395, simple_loss=0.2354, pruned_loss=0.0218, over 7289.00 frames.], tot_loss[loss=0.1656, simple_loss=0.2538, pruned_loss=0.03869, over 1376611.80 frames.], batch size: 19, lr: 5.64e-04 +2022-05-27 20:15:34,800 INFO [train.py:823] (1/4) Epoch 30, batch 750, loss[loss=0.1477, simple_loss=0.2259, pruned_loss=0.03471, over 7091.00 frames.], tot_loss[loss=0.1655, simple_loss=0.2539, pruned_loss=0.03853, over 1382854.50 frames.], batch size: 18, lr: 5.63e-04 +2022-05-27 20:16:13,856 INFO [train.py:823] (1/4) Epoch 30, batch 800, loss[loss=0.1637, simple_loss=0.2509, pruned_loss=0.03828, over 6999.00 frames.], tot_loss[loss=0.1652, simple_loss=0.254, pruned_loss=0.03817, over 1392164.62 frames.], batch size: 26, lr: 5.63e-04 +2022-05-27 20:16:52,895 INFO [train.py:823] (1/4) Epoch 30, batch 850, loss[loss=0.1756, simple_loss=0.2563, pruned_loss=0.04748, over 7178.00 frames.], tot_loss[loss=0.1656, simple_loss=0.2544, pruned_loss=0.03839, over 1390274.64 frames.], batch size: 18, lr: 5.62e-04 +2022-05-27 20:17:32,060 INFO [train.py:823] (1/4) Epoch 30, batch 900, loss[loss=0.1727, simple_loss=0.2506, pruned_loss=0.04741, over 7296.00 frames.], tot_loss[loss=0.1653, simple_loss=0.2545, pruned_loss=0.03811, over 1394558.73 frames.], batch size: 19, lr: 5.62e-04 +2022-05-27 
20:18:24,260 INFO [train.py:823] (1/4) Epoch 31, batch 0, loss[loss=0.1522, simple_loss=0.239, pruned_loss=0.0327, over 7364.00 frames.], tot_loss[loss=0.1522, simple_loss=0.239, pruned_loss=0.0327, over 7364.00 frames.], batch size: 20, lr: 5.52e-04 +2022-05-27 20:19:03,838 INFO [train.py:823] (1/4) Epoch 31, batch 50, loss[loss=0.1491, simple_loss=0.234, pruned_loss=0.03211, over 7188.00 frames.], tot_loss[loss=0.1669, simple_loss=0.2543, pruned_loss=0.03975, over 324532.96 frames.], batch size: 18, lr: 5.52e-04 +2022-05-27 20:19:44,319 INFO [train.py:823] (1/4) Epoch 31, batch 100, loss[loss=0.1452, simple_loss=0.2267, pruned_loss=0.03179, over 6774.00 frames.], tot_loss[loss=0.1651, simple_loss=0.2539, pruned_loss=0.03812, over 564395.66 frames.], batch size: 15, lr: 5.51e-04 +2022-05-27 20:20:23,595 INFO [train.py:823] (1/4) Epoch 31, batch 150, loss[loss=0.1745, simple_loss=0.2654, pruned_loss=0.04176, over 7200.00 frames.], tot_loss[loss=0.1651, simple_loss=0.2544, pruned_loss=0.03794, over 753399.94 frames.], batch size: 25, lr: 5.51e-04 +2022-05-27 20:21:02,307 INFO [train.py:823] (1/4) Epoch 31, batch 200, loss[loss=0.1789, simple_loss=0.2562, pruned_loss=0.05077, over 7103.00 frames.], tot_loss[loss=0.1651, simple_loss=0.2543, pruned_loss=0.03792, over 897763.93 frames.], batch size: 18, lr: 5.50e-04 +2022-05-27 20:21:41,486 INFO [train.py:823] (1/4) Epoch 31, batch 250, loss[loss=0.1623, simple_loss=0.238, pruned_loss=0.04325, over 7162.00 frames.], tot_loss[loss=0.165, simple_loss=0.2538, pruned_loss=0.03814, over 1004158.72 frames.], batch size: 17, lr: 5.50e-04 +2022-05-27 20:22:21,765 INFO [train.py:823] (1/4) Epoch 31, batch 300, loss[loss=0.1876, simple_loss=0.2857, pruned_loss=0.04476, over 7298.00 frames.], tot_loss[loss=0.1654, simple_loss=0.2538, pruned_loss=0.03846, over 1096473.70 frames.], batch size: 22, lr: 5.49e-04 +2022-05-27 20:23:00,946 INFO [train.py:823] (1/4) Epoch 31, batch 350, loss[loss=0.1276, simple_loss=0.2191, pruned_loss=0.01802, over 7144.00 frames.], tot_loss[loss=0.1656, simple_loss=0.2542, pruned_loss=0.03849, over 1163535.32 frames.], batch size: 17, lr: 5.49e-04 +2022-05-27 20:23:41,355 INFO [train.py:823] (1/4) Epoch 31, batch 400, loss[loss=0.1432, simple_loss=0.2312, pruned_loss=0.02756, over 7395.00 frames.], tot_loss[loss=0.165, simple_loss=0.2539, pruned_loss=0.03805, over 1224975.80 frames.], batch size: 19, lr: 5.49e-04 +2022-05-27 20:24:20,607 INFO [train.py:823] (1/4) Epoch 31, batch 450, loss[loss=0.1466, simple_loss=0.2309, pruned_loss=0.03113, over 7317.00 frames.], tot_loss[loss=0.165, simple_loss=0.2541, pruned_loss=0.03794, over 1269708.62 frames.], batch size: 18, lr: 5.48e-04 +2022-05-27 20:24:59,802 INFO [train.py:823] (1/4) Epoch 31, batch 500, loss[loss=0.1417, simple_loss=0.2349, pruned_loss=0.02428, over 7089.00 frames.], tot_loss[loss=0.1641, simple_loss=0.2531, pruned_loss=0.03755, over 1302170.06 frames.], batch size: 18, lr: 5.48e-04 +2022-05-27 20:25:39,397 INFO [train.py:823] (1/4) Epoch 31, batch 550, loss[loss=0.137, simple_loss=0.228, pruned_loss=0.02303, over 7391.00 frames.], tot_loss[loss=0.1632, simple_loss=0.2521, pruned_loss=0.0372, over 1326765.29 frames.], batch size: 19, lr: 5.47e-04 +2022-05-27 20:26:18,510 INFO [train.py:823] (1/4) Epoch 31, batch 600, loss[loss=0.1679, simple_loss=0.2559, pruned_loss=0.03999, over 7251.00 frames.], tot_loss[loss=0.1643, simple_loss=0.253, pruned_loss=0.03778, over 1347424.59 frames.], batch size: 16, lr: 5.47e-04 +2022-05-27 20:26:57,598 INFO [train.py:823] (1/4) 
Epoch 31, batch 650, loss[loss=0.1927, simple_loss=0.2794, pruned_loss=0.05301, over 7156.00 frames.], tot_loss[loss=0.1642, simple_loss=0.2529, pruned_loss=0.03775, over 1362802.05 frames.], batch size: 22, lr: 5.46e-04 +2022-05-27 20:27:36,410 INFO [train.py:823] (1/4) Epoch 31, batch 700, loss[loss=0.1324, simple_loss=0.2163, pruned_loss=0.02422, over 7314.00 frames.], tot_loss[loss=0.1654, simple_loss=0.2542, pruned_loss=0.03827, over 1370974.74 frames.], batch size: 17, lr: 5.46e-04 +2022-05-27 20:28:15,514 INFO [train.py:823] (1/4) Epoch 31, batch 750, loss[loss=0.1487, simple_loss=0.2227, pruned_loss=0.03734, over 7302.00 frames.], tot_loss[loss=0.1649, simple_loss=0.2542, pruned_loss=0.03782, over 1382599.02 frames.], batch size: 18, lr: 5.45e-04 +2022-05-27 20:28:54,274 INFO [train.py:823] (1/4) Epoch 31, batch 800, loss[loss=0.1693, simple_loss=0.2445, pruned_loss=0.04699, over 7212.00 frames.], tot_loss[loss=0.1649, simple_loss=0.2541, pruned_loss=0.0378, over 1392944.02 frames.], batch size: 16, lr: 5.45e-04 +2022-05-27 20:29:32,788 INFO [train.py:823] (1/4) Epoch 31, batch 850, loss[loss=0.176, simple_loss=0.2715, pruned_loss=0.04022, over 6978.00 frames.], tot_loss[loss=0.1655, simple_loss=0.2545, pruned_loss=0.03826, over 1392245.08 frames.], batch size: 26, lr: 5.44e-04 +2022-05-27 20:30:11,756 INFO [train.py:823] (1/4) Epoch 31, batch 900, loss[loss=0.149, simple_loss=0.2433, pruned_loss=0.02738, over 7096.00 frames.], tot_loss[loss=0.1668, simple_loss=0.2559, pruned_loss=0.03884, over 1396579.60 frames.], batch size: 19, lr: 5.44e-04 +2022-05-27 20:31:03,427 INFO [train.py:823] (1/4) Epoch 32, batch 0, loss[loss=0.1556, simple_loss=0.2519, pruned_loss=0.02965, over 5130.00 frames.], tot_loss[loss=0.1556, simple_loss=0.2519, pruned_loss=0.02965, over 5130.00 frames.], batch size: 46, lr: 5.35e-04 +2022-05-27 20:31:42,672 INFO [train.py:823] (1/4) Epoch 32, batch 50, loss[loss=0.1403, simple_loss=0.2181, pruned_loss=0.03124, over 7293.00 frames.], tot_loss[loss=0.1647, simple_loss=0.2533, pruned_loss=0.03805, over 319734.13 frames.], batch size: 17, lr: 5.35e-04 +2022-05-27 20:32:21,580 INFO [train.py:823] (1/4) Epoch 32, batch 100, loss[loss=0.1823, simple_loss=0.2637, pruned_loss=0.05051, over 7163.00 frames.], tot_loss[loss=0.1661, simple_loss=0.255, pruned_loss=0.03864, over 566110.36 frames.], batch size: 22, lr: 5.34e-04 +2022-05-27 20:33:00,028 INFO [train.py:823] (1/4) Epoch 32, batch 150, loss[loss=0.1576, simple_loss=0.2462, pruned_loss=0.03454, over 7198.00 frames.], tot_loss[loss=0.1667, simple_loss=0.2554, pruned_loss=0.039, over 759161.26 frames.], batch size: 19, lr: 5.34e-04 +2022-05-27 20:33:39,216 INFO [train.py:823] (1/4) Epoch 32, batch 200, loss[loss=0.1548, simple_loss=0.2528, pruned_loss=0.02846, over 7204.00 frames.], tot_loss[loss=0.1663, simple_loss=0.2558, pruned_loss=0.03841, over 905295.43 frames.], batch size: 19, lr: 5.33e-04 +2022-05-27 20:34:18,159 INFO [train.py:823] (1/4) Epoch 32, batch 250, loss[loss=0.1857, simple_loss=0.2681, pruned_loss=0.05164, over 7195.00 frames.], tot_loss[loss=0.1649, simple_loss=0.254, pruned_loss=0.03797, over 1022807.81 frames.], batch size: 19, lr: 5.33e-04 +2022-05-27 20:34:57,822 INFO [train.py:823] (1/4) Epoch 32, batch 300, loss[loss=0.1678, simple_loss=0.2573, pruned_loss=0.0392, over 7293.00 frames.], tot_loss[loss=0.1645, simple_loss=0.2534, pruned_loss=0.03785, over 1107898.40 frames.], batch size: 19, lr: 5.32e-04 +2022-05-27 20:35:36,758 INFO [train.py:823] (1/4) Epoch 32, batch 350, 
loss[loss=0.1828, simple_loss=0.2622, pruned_loss=0.05173, over 7005.00 frames.], tot_loss[loss=0.1656, simple_loss=0.2541, pruned_loss=0.03852, over 1176770.47 frames.], batch size: 16, lr: 5.32e-04 +2022-05-27 20:36:16,008 INFO [train.py:823] (1/4) Epoch 32, batch 400, loss[loss=0.1724, simple_loss=0.2627, pruned_loss=0.04111, over 6546.00 frames.], tot_loss[loss=0.1656, simple_loss=0.2546, pruned_loss=0.03825, over 1226708.22 frames.], batch size: 34, lr: 5.32e-04 +2022-05-27 20:36:54,888 INFO [train.py:823] (1/4) Epoch 32, batch 450, loss[loss=0.1891, simple_loss=0.2749, pruned_loss=0.05161, over 7139.00 frames.], tot_loss[loss=0.1646, simple_loss=0.2536, pruned_loss=0.03778, over 1267333.02 frames.], batch size: 23, lr: 5.31e-04 +2022-05-27 20:37:35,384 INFO [train.py:823] (1/4) Epoch 32, batch 500, loss[loss=0.1481, simple_loss=0.2409, pruned_loss=0.02768, over 7196.00 frames.], tot_loss[loss=0.1635, simple_loss=0.2527, pruned_loss=0.03715, over 1300778.41 frames.], batch size: 20, lr: 5.31e-04 +2022-05-27 20:38:14,342 INFO [train.py:823] (1/4) Epoch 32, batch 550, loss[loss=0.1763, simple_loss=0.2706, pruned_loss=0.04103, over 7167.00 frames.], tot_loss[loss=0.1639, simple_loss=0.2535, pruned_loss=0.03715, over 1328992.95 frames.], batch size: 25, lr: 5.30e-04 +2022-05-27 20:38:53,722 INFO [train.py:823] (1/4) Epoch 32, batch 600, loss[loss=0.142, simple_loss=0.2196, pruned_loss=0.03224, over 7298.00 frames.], tot_loss[loss=0.1653, simple_loss=0.2548, pruned_loss=0.03785, over 1349653.36 frames.], batch size: 17, lr: 5.30e-04 +2022-05-27 20:39:32,594 INFO [train.py:823] (1/4) Epoch 32, batch 650, loss[loss=0.1726, simple_loss=0.2696, pruned_loss=0.03778, over 7032.00 frames.], tot_loss[loss=0.1651, simple_loss=0.2548, pruned_loss=0.03772, over 1362378.25 frames.], batch size: 26, lr: 5.29e-04 +2022-05-27 20:40:11,815 INFO [train.py:823] (1/4) Epoch 32, batch 700, loss[loss=0.1578, simple_loss=0.2459, pruned_loss=0.03489, over 7102.00 frames.], tot_loss[loss=0.1644, simple_loss=0.2538, pruned_loss=0.03753, over 1378298.47 frames.], batch size: 20, lr: 5.29e-04 +2022-05-27 20:40:50,463 INFO [train.py:823] (1/4) Epoch 32, batch 750, loss[loss=0.145, simple_loss=0.2252, pruned_loss=0.03245, over 7392.00 frames.], tot_loss[loss=0.164, simple_loss=0.2532, pruned_loss=0.03739, over 1388944.26 frames.], batch size: 19, lr: 5.29e-04 +2022-05-27 20:41:30,210 INFO [train.py:823] (1/4) Epoch 32, batch 800, loss[loss=0.1415, simple_loss=0.2186, pruned_loss=0.03219, over 7165.00 frames.], tot_loss[loss=0.1637, simple_loss=0.2528, pruned_loss=0.03726, over 1397253.45 frames.], batch size: 17, lr: 5.28e-04 +2022-05-27 20:42:10,590 INFO [train.py:823] (1/4) Epoch 32, batch 850, loss[loss=0.1247, simple_loss=0.2082, pruned_loss=0.02058, over 7418.00 frames.], tot_loss[loss=0.1639, simple_loss=0.2528, pruned_loss=0.0375, over 1400927.30 frames.], batch size: 18, lr: 5.28e-04 +2022-05-27 20:42:49,937 INFO [train.py:823] (1/4) Epoch 32, batch 900, loss[loss=0.1486, simple_loss=0.2327, pruned_loss=0.03226, over 7027.00 frames.], tot_loss[loss=0.1648, simple_loss=0.2537, pruned_loss=0.03795, over 1405370.82 frames.], batch size: 17, lr: 5.27e-04 +2022-05-27 20:43:43,939 INFO [train.py:823] (1/4) Epoch 33, batch 0, loss[loss=0.1636, simple_loss=0.2481, pruned_loss=0.03959, over 7040.00 frames.], tot_loss[loss=0.1636, simple_loss=0.2481, pruned_loss=0.03959, over 7040.00 frames.], batch size: 29, lr: 5.19e-04 +2022-05-27 20:44:22,784 INFO [train.py:823] (1/4) Epoch 33, batch 50, loss[loss=0.14, 
simple_loss=0.2271, pruned_loss=0.02639, over 7153.00 frames.], tot_loss[loss=0.164, simple_loss=0.2533, pruned_loss=0.03731, over 317680.44 frames.], batch size: 17, lr: 5.18e-04 +2022-05-27 20:45:02,601 INFO [train.py:823] (1/4) Epoch 33, batch 100, loss[loss=0.1489, simple_loss=0.2246, pruned_loss=0.03662, over 6820.00 frames.], tot_loss[loss=0.1619, simple_loss=0.2509, pruned_loss=0.03646, over 562243.72 frames.], batch size: 15, lr: 5.18e-04 +2022-05-27 20:45:41,685 INFO [train.py:823] (1/4) Epoch 33, batch 150, loss[loss=0.1471, simple_loss=0.2454, pruned_loss=0.02447, over 7193.00 frames.], tot_loss[loss=0.1636, simple_loss=0.2533, pruned_loss=0.037, over 751473.00 frames.], batch size: 21, lr: 5.18e-04 +2022-05-27 20:46:21,839 INFO [train.py:823] (1/4) Epoch 33, batch 200, loss[loss=0.1999, simple_loss=0.2887, pruned_loss=0.05554, over 7116.00 frames.], tot_loss[loss=0.1637, simple_loss=0.2531, pruned_loss=0.0371, over 893384.96 frames.], batch size: 20, lr: 5.17e-04 +2022-05-27 20:47:00,808 INFO [train.py:823] (1/4) Epoch 33, batch 250, loss[loss=0.1816, simple_loss=0.2764, pruned_loss=0.04339, over 7147.00 frames.], tot_loss[loss=0.1632, simple_loss=0.2529, pruned_loss=0.0368, over 1014057.98 frames.], batch size: 23, lr: 5.17e-04 +2022-05-27 20:47:39,819 INFO [train.py:823] (1/4) Epoch 33, batch 300, loss[loss=0.1556, simple_loss=0.2395, pruned_loss=0.03587, over 7165.00 frames.], tot_loss[loss=0.163, simple_loss=0.2524, pruned_loss=0.03684, over 1107514.58 frames.], batch size: 17, lr: 5.16e-04 +2022-05-27 20:48:18,981 INFO [train.py:823] (1/4) Epoch 33, batch 350, loss[loss=0.1721, simple_loss=0.2644, pruned_loss=0.03991, over 7323.00 frames.], tot_loss[loss=0.1634, simple_loss=0.253, pruned_loss=0.03683, over 1177112.87 frames.], batch size: 23, lr: 5.16e-04 +2022-05-27 20:48:57,906 INFO [train.py:823] (1/4) Epoch 33, batch 400, loss[loss=0.1756, simple_loss=0.2727, pruned_loss=0.03926, over 7420.00 frames.], tot_loss[loss=0.164, simple_loss=0.2535, pruned_loss=0.03725, over 1231630.49 frames.], batch size: 22, lr: 5.16e-04 +2022-05-27 20:49:36,991 INFO [train.py:823] (1/4) Epoch 33, batch 450, loss[loss=0.1478, simple_loss=0.2421, pruned_loss=0.02674, over 7296.00 frames.], tot_loss[loss=0.164, simple_loss=0.2538, pruned_loss=0.03717, over 1272769.39 frames.], batch size: 19, lr: 5.15e-04 +2022-05-27 20:50:15,629 INFO [train.py:823] (1/4) Epoch 33, batch 500, loss[loss=0.1622, simple_loss=0.2499, pruned_loss=0.03722, over 6956.00 frames.], tot_loss[loss=0.1642, simple_loss=0.2539, pruned_loss=0.03724, over 1307132.83 frames.], batch size: 29, lr: 5.15e-04 +2022-05-27 20:50:54,726 INFO [train.py:823] (1/4) Epoch 33, batch 550, loss[loss=0.156, simple_loss=0.25, pruned_loss=0.03103, over 7395.00 frames.], tot_loss[loss=0.1636, simple_loss=0.2535, pruned_loss=0.03684, over 1335630.37 frames.], batch size: 19, lr: 5.14e-04 +2022-05-27 20:51:33,947 INFO [train.py:823] (1/4) Epoch 33, batch 600, loss[loss=0.1556, simple_loss=0.2612, pruned_loss=0.02503, over 7430.00 frames.], tot_loss[loss=0.1625, simple_loss=0.2521, pruned_loss=0.03643, over 1353874.26 frames.], batch size: 22, lr: 5.14e-04 +2022-05-27 20:52:12,812 INFO [train.py:823] (1/4) Epoch 33, batch 650, loss[loss=0.1483, simple_loss=0.2221, pruned_loss=0.03724, over 7157.00 frames.], tot_loss[loss=0.1623, simple_loss=0.2516, pruned_loss=0.03646, over 1373281.00 frames.], batch size: 17, lr: 5.14e-04 +2022-05-27 20:52:51,742 INFO [train.py:823] (1/4) Epoch 33, batch 700, loss[loss=0.1568, simple_loss=0.2493, 
pruned_loss=0.03215, over 6434.00 frames.], tot_loss[loss=0.1624, simple_loss=0.2518, pruned_loss=0.03647, over 1384039.91 frames.], batch size: 34, lr: 5.13e-04 +2022-05-27 20:53:30,717 INFO [train.py:823] (1/4) Epoch 33, batch 750, loss[loss=0.1877, simple_loss=0.2822, pruned_loss=0.04659, over 7194.00 frames.], tot_loss[loss=0.1644, simple_loss=0.2537, pruned_loss=0.03756, over 1389997.00 frames.], batch size: 25, lr: 5.13e-04 +2022-05-27 20:54:09,403 INFO [train.py:823] (1/4) Epoch 33, batch 800, loss[loss=0.2049, simple_loss=0.2929, pruned_loss=0.0584, over 7179.00 frames.], tot_loss[loss=0.1651, simple_loss=0.2546, pruned_loss=0.0378, over 1389906.96 frames.], batch size: 22, lr: 5.12e-04 +2022-05-27 20:54:48,125 INFO [train.py:823] (1/4) Epoch 33, batch 850, loss[loss=0.1387, simple_loss=0.2274, pruned_loss=0.02502, over 7077.00 frames.], tot_loss[loss=0.1647, simple_loss=0.2539, pruned_loss=0.03771, over 1399318.53 frames.], batch size: 18, lr: 5.12e-04 +2022-05-27 20:55:26,881 INFO [train.py:823] (1/4) Epoch 33, batch 900, loss[loss=0.1308, simple_loss=0.2162, pruned_loss=0.0227, over 7001.00 frames.], tot_loss[loss=0.1644, simple_loss=0.2538, pruned_loss=0.03751, over 1400847.70 frames.], batch size: 16, lr: 5.12e-04 +2022-05-27 20:56:18,092 INFO [train.py:823] (1/4) Epoch 34, batch 0, loss[loss=0.1974, simple_loss=0.2853, pruned_loss=0.05473, over 7192.00 frames.], tot_loss[loss=0.1974, simple_loss=0.2853, pruned_loss=0.05473, over 7192.00 frames.], batch size: 24, lr: 5.04e-04 +2022-05-27 20:56:56,737 INFO [train.py:823] (1/4) Epoch 34, batch 50, loss[loss=0.144, simple_loss=0.2303, pruned_loss=0.02882, over 7208.00 frames.], tot_loss[loss=0.1618, simple_loss=0.2498, pruned_loss=0.03685, over 319673.07 frames.], batch size: 16, lr: 5.03e-04 +2022-05-27 20:57:36,508 INFO [train.py:823] (1/4) Epoch 34, batch 100, loss[loss=0.1728, simple_loss=0.2664, pruned_loss=0.03958, over 7277.00 frames.], tot_loss[loss=0.163, simple_loss=0.2522, pruned_loss=0.03688, over 560528.65 frames.], batch size: 21, lr: 5.03e-04 +2022-05-27 20:58:15,738 INFO [train.py:823] (1/4) Epoch 34, batch 150, loss[loss=0.165, simple_loss=0.2595, pruned_loss=0.03527, over 7299.00 frames.], tot_loss[loss=0.1627, simple_loss=0.2527, pruned_loss=0.03636, over 753979.70 frames.], batch size: 22, lr: 5.02e-04 +2022-05-27 20:58:54,853 INFO [train.py:823] (1/4) Epoch 34, batch 200, loss[loss=0.1702, simple_loss=0.2724, pruned_loss=0.03399, over 7024.00 frames.], tot_loss[loss=0.1627, simple_loss=0.2524, pruned_loss=0.03653, over 901824.34 frames.], batch size: 26, lr: 5.02e-04 +2022-05-27 20:59:34,241 INFO [train.py:823] (1/4) Epoch 34, batch 250, loss[loss=0.1679, simple_loss=0.2627, pruned_loss=0.03659, over 6928.00 frames.], tot_loss[loss=0.1625, simple_loss=0.2523, pruned_loss=0.03631, over 1012062.84 frames.], batch size: 26, lr: 5.02e-04 +2022-05-27 21:00:13,158 INFO [train.py:823] (1/4) Epoch 34, batch 300, loss[loss=0.1654, simple_loss=0.2513, pruned_loss=0.03973, over 7374.00 frames.], tot_loss[loss=0.1623, simple_loss=0.252, pruned_loss=0.03633, over 1101804.28 frames.], batch size: 21, lr: 5.01e-04 +2022-05-27 21:00:53,311 INFO [train.py:823] (1/4) Epoch 34, batch 350, loss[loss=0.1616, simple_loss=0.2495, pruned_loss=0.03686, over 7111.00 frames.], tot_loss[loss=0.1628, simple_loss=0.2527, pruned_loss=0.03646, over 1169304.57 frames.], batch size: 19, lr: 5.01e-04 +2022-05-27 21:01:32,636 INFO [train.py:823] (1/4) Epoch 34, batch 400, loss[loss=0.1728, simple_loss=0.2703, pruned_loss=0.03761, over 7294.00 
frames.], tot_loss[loss=0.1623, simple_loss=0.2523, pruned_loss=0.03614, over 1224252.14 frames.], batch size: 21, lr: 5.00e-04 +2022-05-27 21:02:11,949 INFO [train.py:823] (1/4) Epoch 34, batch 450, loss[loss=0.1597, simple_loss=0.2606, pruned_loss=0.02938, over 7289.00 frames.], tot_loss[loss=0.1628, simple_loss=0.2525, pruned_loss=0.03649, over 1270096.95 frames.], batch size: 20, lr: 5.00e-04 +2022-05-27 21:02:51,472 INFO [train.py:823] (1/4) Epoch 34, batch 500, loss[loss=0.1692, simple_loss=0.2644, pruned_loss=0.03701, over 7166.00 frames.], tot_loss[loss=0.1623, simple_loss=0.2521, pruned_loss=0.03627, over 1303102.39 frames.], batch size: 23, lr: 5.00e-04 +2022-05-27 21:03:31,239 INFO [train.py:823] (1/4) Epoch 34, batch 550, loss[loss=0.1746, simple_loss=0.2617, pruned_loss=0.04371, over 7200.00 frames.], tot_loss[loss=0.1623, simple_loss=0.2522, pruned_loss=0.03618, over 1335260.47 frames.], batch size: 25, lr: 4.99e-04 +2022-05-27 21:04:10,489 INFO [train.py:823] (1/4) Epoch 34, batch 600, loss[loss=0.1376, simple_loss=0.2168, pruned_loss=0.02923, over 7300.00 frames.], tot_loss[loss=0.1628, simple_loss=0.2528, pruned_loss=0.0364, over 1353061.58 frames.], batch size: 17, lr: 4.99e-04 +2022-05-27 21:04:51,415 INFO [train.py:823] (1/4) Epoch 34, batch 650, loss[loss=0.1742, simple_loss=0.2644, pruned_loss=0.04204, over 6971.00 frames.], tot_loss[loss=0.1621, simple_loss=0.2515, pruned_loss=0.03631, over 1367133.36 frames.], batch size: 29, lr: 4.99e-04 +2022-05-27 21:05:35,672 INFO [train.py:823] (1/4) Epoch 34, batch 700, loss[loss=0.1651, simple_loss=0.2476, pruned_loss=0.04128, over 7372.00 frames.], tot_loss[loss=0.1623, simple_loss=0.2515, pruned_loss=0.03655, over 1375537.17 frames.], batch size: 20, lr: 4.98e-04 +2022-05-27 21:06:14,442 INFO [train.py:823] (1/4) Epoch 34, batch 750, loss[loss=0.1496, simple_loss=0.2303, pruned_loss=0.03446, over 7008.00 frames.], tot_loss[loss=0.1624, simple_loss=0.2518, pruned_loss=0.0365, over 1387446.09 frames.], batch size: 16, lr: 4.98e-04 +2022-05-27 21:06:53,488 INFO [train.py:823] (1/4) Epoch 34, batch 800, loss[loss=0.1282, simple_loss=0.2157, pruned_loss=0.02033, over 7212.00 frames.], tot_loss[loss=0.1625, simple_loss=0.2518, pruned_loss=0.03657, over 1395525.39 frames.], batch size: 19, lr: 4.97e-04 +2022-05-27 21:07:32,178 INFO [train.py:823] (1/4) Epoch 34, batch 850, loss[loss=0.1695, simple_loss=0.2612, pruned_loss=0.03885, over 7374.00 frames.], tot_loss[loss=0.1624, simple_loss=0.2518, pruned_loss=0.0365, over 1395941.53 frames.], batch size: 21, lr: 4.97e-04 +2022-05-27 21:08:12,981 INFO [train.py:823] (1/4) Epoch 34, batch 900, loss[loss=0.1564, simple_loss=0.2463, pruned_loss=0.03321, over 7102.00 frames.], tot_loss[loss=0.1614, simple_loss=0.2512, pruned_loss=0.03584, over 1400053.35 frames.], batch size: 18, lr: 4.97e-04 +2022-05-27 21:09:07,149 INFO [train.py:823] (1/4) Epoch 35, batch 0, loss[loss=0.1881, simple_loss=0.2741, pruned_loss=0.05104, over 7186.00 frames.], tot_loss[loss=0.1881, simple_loss=0.2741, pruned_loss=0.05104, over 7186.00 frames.], batch size: 21, lr: 4.89e-04 +2022-05-27 21:09:48,000 INFO [train.py:823] (1/4) Epoch 35, batch 50, loss[loss=0.1516, simple_loss=0.2358, pruned_loss=0.03373, over 7191.00 frames.], tot_loss[loss=0.1669, simple_loss=0.2568, pruned_loss=0.03855, over 323702.48 frames.], batch size: 18, lr: 4.89e-04 +2022-05-27 21:10:26,965 INFO [train.py:823] (1/4) Epoch 35, batch 100, loss[loss=0.1684, simple_loss=0.2614, pruned_loss=0.03768, over 6563.00 frames.], 
tot_loss[loss=0.1647, simple_loss=0.2544, pruned_loss=0.03749, over 568737.04 frames.], batch size: 34, lr: 4.88e-04 +2022-05-27 21:11:06,200 INFO [train.py:823] (1/4) Epoch 35, batch 150, loss[loss=0.168, simple_loss=0.25, pruned_loss=0.04302, over 7216.00 frames.], tot_loss[loss=0.1625, simple_loss=0.2516, pruned_loss=0.03673, over 755001.50 frames.], batch size: 25, lr: 4.88e-04 +2022-05-27 21:11:44,984 INFO [train.py:823] (1/4) Epoch 35, batch 200, loss[loss=0.154, simple_loss=0.2416, pruned_loss=0.03317, over 6868.00 frames.], tot_loss[loss=0.1621, simple_loss=0.2512, pruned_loss=0.03656, over 904674.09 frames.], batch size: 29, lr: 4.88e-04 +2022-05-27 21:12:24,212 INFO [train.py:823] (1/4) Epoch 35, batch 250, loss[loss=0.1654, simple_loss=0.2565, pruned_loss=0.03713, over 7221.00 frames.], tot_loss[loss=0.1617, simple_loss=0.2509, pruned_loss=0.03621, over 1013474.34 frames.], batch size: 24, lr: 4.87e-04 +2022-05-27 21:13:03,253 INFO [train.py:823] (1/4) Epoch 35, batch 300, loss[loss=0.1617, simple_loss=0.2535, pruned_loss=0.03497, over 7285.00 frames.], tot_loss[loss=0.1612, simple_loss=0.2504, pruned_loss=0.03596, over 1106714.22 frames.], batch size: 21, lr: 4.87e-04 +2022-05-27 21:13:42,448 INFO [train.py:823] (1/4) Epoch 35, batch 350, loss[loss=0.1235, simple_loss=0.206, pruned_loss=0.02049, over 7088.00 frames.], tot_loss[loss=0.1612, simple_loss=0.2503, pruned_loss=0.03605, over 1172875.77 frames.], batch size: 18, lr: 4.87e-04 +2022-05-27 21:14:21,163 INFO [train.py:823] (1/4) Epoch 35, batch 400, loss[loss=0.1604, simple_loss=0.2561, pruned_loss=0.03239, over 7155.00 frames.], tot_loss[loss=0.1608, simple_loss=0.2501, pruned_loss=0.03576, over 1223339.61 frames.], batch size: 22, lr: 4.86e-04 +2022-05-27 21:15:00,099 INFO [train.py:823] (1/4) Epoch 35, batch 450, loss[loss=0.138, simple_loss=0.2212, pruned_loss=0.02743, over 7298.00 frames.], tot_loss[loss=0.1615, simple_loss=0.2508, pruned_loss=0.0361, over 1270565.69 frames.], batch size: 17, lr: 4.86e-04 +2022-05-27 21:15:38,760 INFO [train.py:823] (1/4) Epoch 35, batch 500, loss[loss=0.1524, simple_loss=0.2346, pruned_loss=0.03514, over 7017.00 frames.], tot_loss[loss=0.1616, simple_loss=0.2512, pruned_loss=0.03596, over 1305729.87 frames.], batch size: 17, lr: 4.86e-04 +2022-05-27 21:16:17,675 INFO [train.py:823] (1/4) Epoch 35, batch 550, loss[loss=0.1467, simple_loss=0.2304, pruned_loss=0.03147, over 7022.00 frames.], tot_loss[loss=0.1606, simple_loss=0.2507, pruned_loss=0.03529, over 1329081.57 frames.], batch size: 17, lr: 4.85e-04 +2022-05-27 21:16:57,046 INFO [train.py:823] (1/4) Epoch 35, batch 600, loss[loss=0.1887, simple_loss=0.2771, pruned_loss=0.05015, over 7276.00 frames.], tot_loss[loss=0.1603, simple_loss=0.2504, pruned_loss=0.03514, over 1349045.60 frames.], batch size: 20, lr: 4.85e-04 +2022-05-27 21:17:36,293 INFO [train.py:823] (1/4) Epoch 35, batch 650, loss[loss=0.181, simple_loss=0.2711, pruned_loss=0.04545, over 6971.00 frames.], tot_loss[loss=0.1607, simple_loss=0.2503, pruned_loss=0.03555, over 1368173.90 frames.], batch size: 26, lr: 4.84e-04 +2022-05-27 21:18:15,570 INFO [train.py:823] (1/4) Epoch 35, batch 700, loss[loss=0.1753, simple_loss=0.2564, pruned_loss=0.04711, over 7274.00 frames.], tot_loss[loss=0.1601, simple_loss=0.2498, pruned_loss=0.03516, over 1378294.64 frames.], batch size: 20, lr: 4.84e-04 +2022-05-27 21:18:54,741 INFO [train.py:823] (1/4) Epoch 35, batch 750, loss[loss=0.157, simple_loss=0.2426, pruned_loss=0.03567, over 7111.00 frames.], tot_loss[loss=0.1614, 
simple_loss=0.2515, pruned_loss=0.03565, over 1391286.95 frames.], batch size: 19, lr: 4.84e-04 +2022-05-27 21:19:32,982 INFO [train.py:823] (1/4) Epoch 35, batch 800, loss[loss=0.1529, simple_loss=0.2406, pruned_loss=0.03256, over 7311.00 frames.], tot_loss[loss=0.1612, simple_loss=0.2515, pruned_loss=0.0354, over 1395224.78 frames.], batch size: 18, lr: 4.83e-04 +2022-05-27 21:20:12,106 INFO [train.py:823] (1/4) Epoch 35, batch 850, loss[loss=0.1744, simple_loss=0.2742, pruned_loss=0.03724, over 7413.00 frames.], tot_loss[loss=0.1618, simple_loss=0.2522, pruned_loss=0.03572, over 1404157.32 frames.], batch size: 22, lr: 4.83e-04 +2022-05-27 21:20:50,657 INFO [train.py:823] (1/4) Epoch 35, batch 900, loss[loss=0.1637, simple_loss=0.2579, pruned_loss=0.03473, over 6475.00 frames.], tot_loss[loss=0.1618, simple_loss=0.2521, pruned_loss=0.03574, over 1401431.40 frames.], batch size: 34, lr: 4.83e-04 +2022-05-27 21:21:29,569 INFO [train.py:823] (1/4) Epoch 35, batch 950, loss[loss=0.1917, simple_loss=0.2658, pruned_loss=0.05878, over 4890.00 frames.], tot_loss[loss=0.1627, simple_loss=0.2528, pruned_loss=0.03633, over 1379222.59 frames.], batch size: 48, lr: 4.82e-04 +2022-05-27 21:21:42,940 INFO [train.py:823] (1/4) Epoch 36, batch 0, loss[loss=0.2083, simple_loss=0.2951, pruned_loss=0.06076, over 7418.00 frames.], tot_loss[loss=0.2083, simple_loss=0.2951, pruned_loss=0.06076, over 7418.00 frames.], batch size: 22, lr: 4.76e-04 +2022-05-27 21:22:22,308 INFO [train.py:823] (1/4) Epoch 36, batch 50, loss[loss=0.1462, simple_loss=0.226, pruned_loss=0.03326, over 7171.00 frames.], tot_loss[loss=0.1578, simple_loss=0.2469, pruned_loss=0.0343, over 318573.21 frames.], batch size: 17, lr: 4.75e-04 +2022-05-27 21:23:01,915 INFO [train.py:823] (1/4) Epoch 36, batch 100, loss[loss=0.1615, simple_loss=0.2602, pruned_loss=0.03143, over 6508.00 frames.], tot_loss[loss=0.1591, simple_loss=0.2494, pruned_loss=0.0344, over 564379.59 frames.], batch size: 34, lr: 4.75e-04 +2022-05-27 21:23:40,587 INFO [train.py:823] (1/4) Epoch 36, batch 150, loss[loss=0.1704, simple_loss=0.2592, pruned_loss=0.04084, over 7221.00 frames.], tot_loss[loss=0.1596, simple_loss=0.25, pruned_loss=0.03464, over 751580.86 frames.], batch size: 25, lr: 4.74e-04 +2022-05-27 21:24:21,423 INFO [train.py:823] (1/4) Epoch 36, batch 200, loss[loss=0.1527, simple_loss=0.2281, pruned_loss=0.03867, over 7308.00 frames.], tot_loss[loss=0.1613, simple_loss=0.2509, pruned_loss=0.03586, over 899331.08 frames.], batch size: 17, lr: 4.74e-04 +2022-05-27 21:25:00,000 INFO [train.py:823] (1/4) Epoch 36, batch 250, loss[loss=0.1591, simple_loss=0.246, pruned_loss=0.03611, over 7385.00 frames.], tot_loss[loss=0.1618, simple_loss=0.2508, pruned_loss=0.03639, over 1013033.39 frames.], batch size: 19, lr: 4.74e-04 +2022-05-27 21:25:39,293 INFO [train.py:823] (1/4) Epoch 36, batch 300, loss[loss=0.1569, simple_loss=0.2522, pruned_loss=0.03077, over 7347.00 frames.], tot_loss[loss=0.1605, simple_loss=0.2494, pruned_loss=0.03579, over 1102157.87 frames.], batch size: 23, lr: 4.73e-04 +2022-05-27 21:26:18,970 INFO [train.py:823] (1/4) Epoch 36, batch 350, loss[loss=0.1444, simple_loss=0.2369, pruned_loss=0.02595, over 7374.00 frames.], tot_loss[loss=0.1601, simple_loss=0.2492, pruned_loss=0.03548, over 1172512.04 frames.], batch size: 20, lr: 4.73e-04 +2022-05-27 21:26:58,350 INFO [train.py:823] (1/4) Epoch 36, batch 400, loss[loss=0.1466, simple_loss=0.2304, pruned_loss=0.03142, over 7104.00 frames.], tot_loss[loss=0.1609, simple_loss=0.2506, 
pruned_loss=0.03562, over 1227868.36 frames.], batch size: 18, lr: 4.73e-04 +2022-05-27 21:27:39,283 INFO [train.py:823] (1/4) Epoch 36, batch 450, loss[loss=0.1757, simple_loss=0.2695, pruned_loss=0.0409, over 7012.00 frames.], tot_loss[loss=0.1614, simple_loss=0.2508, pruned_loss=0.03599, over 1270255.07 frames.], batch size: 26, lr: 4.72e-04 +2022-05-27 21:28:18,447 INFO [train.py:823] (1/4) Epoch 36, batch 500, loss[loss=0.1749, simple_loss=0.2664, pruned_loss=0.0417, over 7200.00 frames.], tot_loss[loss=0.1609, simple_loss=0.2506, pruned_loss=0.0356, over 1301052.62 frames.], batch size: 24, lr: 4.72e-04 +2022-05-27 21:28:57,558 INFO [train.py:823] (1/4) Epoch 36, batch 550, loss[loss=0.1453, simple_loss=0.2315, pruned_loss=0.02952, over 7297.00 frames.], tot_loss[loss=0.1605, simple_loss=0.25, pruned_loss=0.03543, over 1327820.38 frames.], batch size: 17, lr: 4.72e-04 +2022-05-27 21:29:37,086 INFO [train.py:823] (1/4) Epoch 36, batch 600, loss[loss=0.1484, simple_loss=0.2331, pruned_loss=0.03186, over 7283.00 frames.], tot_loss[loss=0.1607, simple_loss=0.2504, pruned_loss=0.03553, over 1346225.55 frames.], batch size: 17, lr: 4.71e-04 +2022-05-27 21:30:16,460 INFO [train.py:823] (1/4) Epoch 36, batch 650, loss[loss=0.1727, simple_loss=0.2747, pruned_loss=0.03531, over 7374.00 frames.], tot_loss[loss=0.1616, simple_loss=0.2516, pruned_loss=0.03582, over 1361482.67 frames.], batch size: 21, lr: 4.71e-04 +2022-05-27 21:30:56,832 INFO [train.py:823] (1/4) Epoch 36, batch 700, loss[loss=0.1405, simple_loss=0.2226, pruned_loss=0.02916, over 7293.00 frames.], tot_loss[loss=0.1615, simple_loss=0.2516, pruned_loss=0.03573, over 1377560.38 frames.], batch size: 17, lr: 4.71e-04 +2022-05-27 21:31:36,035 INFO [train.py:823] (1/4) Epoch 36, batch 750, loss[loss=0.1485, simple_loss=0.248, pruned_loss=0.02448, over 7285.00 frames.], tot_loss[loss=0.1606, simple_loss=0.2504, pruned_loss=0.0354, over 1386588.53 frames.], batch size: 21, lr: 4.70e-04 +2022-05-27 21:32:16,404 INFO [train.py:823] (1/4) Epoch 36, batch 800, loss[loss=0.1683, simple_loss=0.2589, pruned_loss=0.03882, over 7373.00 frames.], tot_loss[loss=0.1612, simple_loss=0.251, pruned_loss=0.0357, over 1387730.61 frames.], batch size: 21, lr: 4.70e-04 +2022-05-27 21:32:55,449 INFO [train.py:823] (1/4) Epoch 36, batch 850, loss[loss=0.176, simple_loss=0.2647, pruned_loss=0.04372, over 7351.00 frames.], tot_loss[loss=0.1612, simple_loss=0.2507, pruned_loss=0.03582, over 1388268.37 frames.], batch size: 23, lr: 4.70e-04 +2022-05-27 21:33:34,407 INFO [train.py:823] (1/4) Epoch 36, batch 900, loss[loss=0.1729, simple_loss=0.2661, pruned_loss=0.0399, over 7425.00 frames.], tot_loss[loss=0.1611, simple_loss=0.2514, pruned_loss=0.0354, over 1396152.75 frames.], batch size: 22, lr: 4.69e-04 +2022-05-27 21:34:27,368 INFO [train.py:823] (1/4) Epoch 37, batch 0, loss[loss=0.1663, simple_loss=0.2629, pruned_loss=0.03483, over 6508.00 frames.], tot_loss[loss=0.1663, simple_loss=0.2629, pruned_loss=0.03483, over 6508.00 frames.], batch size: 34, lr: 4.63e-04 +2022-05-27 21:35:06,543 INFO [train.py:823] (1/4) Epoch 37, batch 50, loss[loss=0.1493, simple_loss=0.2503, pruned_loss=0.02421, over 7309.00 frames.], tot_loss[loss=0.1612, simple_loss=0.2529, pruned_loss=0.03477, over 319984.74 frames.], batch size: 22, lr: 4.62e-04 +2022-05-27 21:35:45,356 INFO [train.py:823] (1/4) Epoch 37, batch 100, loss[loss=0.1679, simple_loss=0.264, pruned_loss=0.03586, over 7205.00 frames.], tot_loss[loss=0.1611, simple_loss=0.2522, pruned_loss=0.03498, over 562122.93 
frames.], batch size: 24, lr: 4.62e-04 +2022-05-27 21:36:24,660 INFO [train.py:823] (1/4) Epoch 37, batch 150, loss[loss=0.1499, simple_loss=0.242, pruned_loss=0.02892, over 7188.00 frames.], tot_loss[loss=0.1619, simple_loss=0.2524, pruned_loss=0.0357, over 751018.24 frames.], batch size: 21, lr: 4.62e-04 +2022-05-27 21:37:04,060 INFO [train.py:823] (1/4) Epoch 37, batch 200, loss[loss=0.1874, simple_loss=0.2726, pruned_loss=0.05109, over 7219.00 frames.], tot_loss[loss=0.1605, simple_loss=0.2498, pruned_loss=0.03562, over 903887.61 frames.], batch size: 24, lr: 4.61e-04 +2022-05-27 21:37:43,502 INFO [train.py:823] (1/4) Epoch 37, batch 250, loss[loss=0.1446, simple_loss=0.2364, pruned_loss=0.02639, over 6992.00 frames.], tot_loss[loss=0.1608, simple_loss=0.2503, pruned_loss=0.03561, over 1020635.86 frames.], batch size: 26, lr: 4.61e-04 +2022-05-27 21:38:22,752 INFO [train.py:823] (1/4) Epoch 37, batch 300, loss[loss=0.1451, simple_loss=0.2289, pruned_loss=0.0307, over 7015.00 frames.], tot_loss[loss=0.1601, simple_loss=0.2493, pruned_loss=0.03545, over 1106265.49 frames.], batch size: 16, lr: 4.61e-04 +2022-05-27 21:39:02,443 INFO [train.py:823] (1/4) Epoch 37, batch 350, loss[loss=0.1884, simple_loss=0.2761, pruned_loss=0.05039, over 7207.00 frames.], tot_loss[loss=0.1595, simple_loss=0.249, pruned_loss=0.03493, over 1172941.57 frames.], batch size: 25, lr: 4.60e-04 +2022-05-27 21:39:41,232 INFO [train.py:823] (1/4) Epoch 37, batch 400, loss[loss=0.1594, simple_loss=0.242, pruned_loss=0.03835, over 7305.00 frames.], tot_loss[loss=0.1606, simple_loss=0.2502, pruned_loss=0.0355, over 1229323.32 frames.], batch size: 17, lr: 4.60e-04 +2022-05-27 21:40:19,852 INFO [train.py:823] (1/4) Epoch 37, batch 450, loss[loss=0.1479, simple_loss=0.2311, pruned_loss=0.03234, over 7191.00 frames.], tot_loss[loss=0.1609, simple_loss=0.2507, pruned_loss=0.03559, over 1268692.07 frames.], batch size: 19, lr: 4.60e-04 +2022-05-27 21:40:58,930 INFO [train.py:823] (1/4) Epoch 37, batch 500, loss[loss=0.1614, simple_loss=0.245, pruned_loss=0.03888, over 7019.00 frames.], tot_loss[loss=0.1602, simple_loss=0.2502, pruned_loss=0.03504, over 1303751.09 frames.], batch size: 16, lr: 4.59e-04 +2022-05-27 21:41:38,554 INFO [train.py:823] (1/4) Epoch 37, batch 550, loss[loss=0.1354, simple_loss=0.2227, pruned_loss=0.02399, over 7002.00 frames.], tot_loss[loss=0.1604, simple_loss=0.2504, pruned_loss=0.03515, over 1330586.87 frames.], batch size: 16, lr: 4.59e-04 +2022-05-27 21:42:17,368 INFO [train.py:823] (1/4) Epoch 37, batch 600, loss[loss=0.159, simple_loss=0.2485, pruned_loss=0.03481, over 7341.00 frames.], tot_loss[loss=0.1603, simple_loss=0.2509, pruned_loss=0.03481, over 1350554.66 frames.], batch size: 23, lr: 4.59e-04 +2022-05-27 21:42:55,856 INFO [train.py:823] (1/4) Epoch 37, batch 650, loss[loss=0.1184, simple_loss=0.1978, pruned_loss=0.01953, over 7137.00 frames.], tot_loss[loss=0.1602, simple_loss=0.2506, pruned_loss=0.03488, over 1366073.85 frames.], batch size: 17, lr: 4.58e-04 +2022-05-27 21:43:34,819 INFO [train.py:823] (1/4) Epoch 37, batch 700, loss[loss=0.1724, simple_loss=0.27, pruned_loss=0.03734, over 7418.00 frames.], tot_loss[loss=0.1606, simple_loss=0.2509, pruned_loss=0.03515, over 1372897.19 frames.], batch size: 22, lr: 4.58e-04 +2022-05-27 21:44:14,139 INFO [train.py:823] (1/4) Epoch 37, batch 750, loss[loss=0.1563, simple_loss=0.2496, pruned_loss=0.03147, over 5131.00 frames.], tot_loss[loss=0.1611, simple_loss=0.2514, pruned_loss=0.03546, over 1379841.16 frames.], batch size: 47, lr: 
4.58e-04 +2022-05-27 21:44:53,001 INFO [train.py:823] (1/4) Epoch 37, batch 800, loss[loss=0.1813, simple_loss=0.2796, pruned_loss=0.04152, over 7284.00 frames.], tot_loss[loss=0.161, simple_loss=0.2514, pruned_loss=0.03534, over 1384503.47 frames.], batch size: 21, lr: 4.57e-04 +2022-05-27 21:45:31,917 INFO [train.py:823] (1/4) Epoch 37, batch 850, loss[loss=0.1555, simple_loss=0.2268, pruned_loss=0.04212, over 6800.00 frames.], tot_loss[loss=0.16, simple_loss=0.2498, pruned_loss=0.03509, over 1386175.43 frames.], batch size: 15, lr: 4.57e-04 +2022-05-27 21:46:10,766 INFO [train.py:823] (1/4) Epoch 37, batch 900, loss[loss=0.2267, simple_loss=0.3291, pruned_loss=0.06211, over 7148.00 frames.], tot_loss[loss=0.1592, simple_loss=0.2487, pruned_loss=0.03479, over 1392659.44 frames.], batch size: 23, lr: 4.57e-04 +2022-05-27 21:47:05,034 INFO [train.py:823] (1/4) Epoch 38, batch 0, loss[loss=0.1529, simple_loss=0.24, pruned_loss=0.03292, over 7392.00 frames.], tot_loss[loss=0.1529, simple_loss=0.24, pruned_loss=0.03292, over 7392.00 frames.], batch size: 19, lr: 4.50e-04 +2022-05-27 21:47:44,002 INFO [train.py:823] (1/4) Epoch 38, batch 50, loss[loss=0.1622, simple_loss=0.2518, pruned_loss=0.03633, over 7112.00 frames.], tot_loss[loss=0.1589, simple_loss=0.2476, pruned_loss=0.03509, over 321742.22 frames.], batch size: 19, lr: 4.50e-04 +2022-05-27 21:48:24,753 INFO [train.py:823] (1/4) Epoch 38, batch 100, loss[loss=0.183, simple_loss=0.2751, pruned_loss=0.04544, over 7345.00 frames.], tot_loss[loss=0.16, simple_loss=0.2499, pruned_loss=0.03508, over 565342.28 frames.], batch size: 23, lr: 4.50e-04 +2022-05-27 21:49:03,849 INFO [train.py:823] (1/4) Epoch 38, batch 150, loss[loss=0.1677, simple_loss=0.2579, pruned_loss=0.03879, over 6998.00 frames.], tot_loss[loss=0.1592, simple_loss=0.2492, pruned_loss=0.0346, over 754084.96 frames.], batch size: 26, lr: 4.50e-04 +2022-05-27 21:49:43,228 INFO [train.py:823] (1/4) Epoch 38, batch 200, loss[loss=0.1756, simple_loss=0.2708, pruned_loss=0.04015, over 6404.00 frames.], tot_loss[loss=0.1586, simple_loss=0.2491, pruned_loss=0.03407, over 902332.05 frames.], batch size: 34, lr: 4.49e-04 +2022-05-27 21:50:22,157 INFO [train.py:823] (1/4) Epoch 38, batch 250, loss[loss=0.1589, simple_loss=0.249, pruned_loss=0.03442, over 7117.00 frames.], tot_loss[loss=0.1584, simple_loss=0.2485, pruned_loss=0.0342, over 1021306.89 frames.], batch size: 20, lr: 4.49e-04 +2022-05-27 21:51:02,995 INFO [train.py:823] (1/4) Epoch 38, batch 300, loss[loss=0.1538, simple_loss=0.2492, pruned_loss=0.02914, over 7289.00 frames.], tot_loss[loss=0.1595, simple_loss=0.2492, pruned_loss=0.03485, over 1107854.88 frames.], batch size: 21, lr: 4.49e-04 +2022-05-27 21:51:42,034 INFO [train.py:823] (1/4) Epoch 38, batch 350, loss[loss=0.1295, simple_loss=0.2069, pruned_loss=0.02608, over 6794.00 frames.], tot_loss[loss=0.1591, simple_loss=0.249, pruned_loss=0.03461, over 1181858.94 frames.], batch size: 15, lr: 4.48e-04 +2022-05-27 21:52:21,189 INFO [train.py:823] (1/4) Epoch 38, batch 400, loss[loss=0.1795, simple_loss=0.2569, pruned_loss=0.05108, over 4535.00 frames.], tot_loss[loss=0.1596, simple_loss=0.2497, pruned_loss=0.03471, over 1235800.33 frames.], batch size: 46, lr: 4.48e-04 +2022-05-27 21:53:00,034 INFO [train.py:823] (1/4) Epoch 38, batch 450, loss[loss=0.1675, simple_loss=0.2533, pruned_loss=0.04086, over 7190.00 frames.], tot_loss[loss=0.159, simple_loss=0.2489, pruned_loss=0.03451, over 1280837.89 frames.], batch size: 20, lr: 4.48e-04 +2022-05-27 21:53:39,589 INFO 
[train.py:823] (1/4) Epoch 38, batch 500, loss[loss=0.1538, simple_loss=0.2471, pruned_loss=0.03028, over 7280.00 frames.], tot_loss[loss=0.1582, simple_loss=0.2479, pruned_loss=0.0343, over 1314427.43 frames.], batch size: 21, lr: 4.47e-04 +2022-05-27 21:54:19,845 INFO [train.py:823] (1/4) Epoch 38, batch 550, loss[loss=0.1537, simple_loss=0.2457, pruned_loss=0.03088, over 7194.00 frames.], tot_loss[loss=0.1585, simple_loss=0.2482, pruned_loss=0.03439, over 1331882.89 frames.], batch size: 20, lr: 4.47e-04 +2022-05-27 21:54:59,351 INFO [train.py:823] (1/4) Epoch 38, batch 600, loss[loss=0.1782, simple_loss=0.2822, pruned_loss=0.03713, over 6392.00 frames.], tot_loss[loss=0.1588, simple_loss=0.2485, pruned_loss=0.0346, over 1350493.74 frames.], batch size: 34, lr: 4.47e-04 +2022-05-27 21:55:39,777 INFO [train.py:823] (1/4) Epoch 38, batch 650, loss[loss=0.1349, simple_loss=0.2298, pruned_loss=0.02003, over 7289.00 frames.], tot_loss[loss=0.159, simple_loss=0.2486, pruned_loss=0.03471, over 1367365.43 frames.], batch size: 20, lr: 4.46e-04 +2022-05-27 21:56:18,838 INFO [train.py:823] (1/4) Epoch 38, batch 700, loss[loss=0.1641, simple_loss=0.2618, pruned_loss=0.03322, over 7170.00 frames.], tot_loss[loss=0.16, simple_loss=0.2497, pruned_loss=0.03519, over 1376991.68 frames.], batch size: 22, lr: 4.46e-04 +2022-05-27 21:56:57,008 INFO [train.py:823] (1/4) Epoch 38, batch 750, loss[loss=0.1728, simple_loss=0.2759, pruned_loss=0.03486, over 7237.00 frames.], tot_loss[loss=0.1596, simple_loss=0.2493, pruned_loss=0.03489, over 1383520.65 frames.], batch size: 24, lr: 4.46e-04 +2022-05-27 21:57:36,105 INFO [train.py:823] (1/4) Epoch 38, batch 800, loss[loss=0.1704, simple_loss=0.2561, pruned_loss=0.04235, over 7376.00 frames.], tot_loss[loss=0.1597, simple_loss=0.2494, pruned_loss=0.03503, over 1385802.74 frames.], batch size: 21, lr: 4.45e-04 +2022-05-27 21:58:14,992 INFO [train.py:823] (1/4) Epoch 38, batch 850, loss[loss=0.2075, simple_loss=0.2945, pruned_loss=0.0602, over 6961.00 frames.], tot_loss[loss=0.1594, simple_loss=0.249, pruned_loss=0.03497, over 1395563.56 frames.], batch size: 29, lr: 4.45e-04 +2022-05-27 21:58:54,465 INFO [train.py:823] (1/4) Epoch 38, batch 900, loss[loss=0.1458, simple_loss=0.2274, pruned_loss=0.03211, over 7012.00 frames.], tot_loss[loss=0.1591, simple_loss=0.2486, pruned_loss=0.03473, over 1399219.74 frames.], batch size: 16, lr: 4.45e-04 +2022-05-27 21:59:32,660 INFO [train.py:823] (1/4) Epoch 38, batch 950, loss[loss=0.184, simple_loss=0.2678, pruned_loss=0.05012, over 4819.00 frames.], tot_loss[loss=0.1592, simple_loss=0.2482, pruned_loss=0.03509, over 1374990.41 frames.], batch size: 46, lr: 4.45e-04 +2022-05-27 21:59:45,977 INFO [train.py:823] (1/4) Epoch 39, batch 0, loss[loss=0.1562, simple_loss=0.2468, pruned_loss=0.0328, over 7295.00 frames.], tot_loss[loss=0.1562, simple_loss=0.2468, pruned_loss=0.0328, over 7295.00 frames.], batch size: 19, lr: 4.39e-04 +2022-05-27 22:00:25,289 INFO [train.py:823] (1/4) Epoch 39, batch 50, loss[loss=0.1831, simple_loss=0.276, pruned_loss=0.04513, over 7413.00 frames.], tot_loss[loss=0.1623, simple_loss=0.2521, pruned_loss=0.03628, over 321654.92 frames.], batch size: 22, lr: 4.39e-04 +2022-05-27 22:01:04,267 INFO [train.py:823] (1/4) Epoch 39, batch 100, loss[loss=0.1379, simple_loss=0.2222, pruned_loss=0.02677, over 7313.00 frames.], tot_loss[loss=0.1593, simple_loss=0.2487, pruned_loss=0.03495, over 566131.82 frames.], batch size: 18, lr: 4.38e-04 +2022-05-27 22:01:43,951 INFO [train.py:823] (1/4) Epoch 39, batch 
150, loss[loss=0.1598, simple_loss=0.2563, pruned_loss=0.03161, over 7229.00 frames.], tot_loss[loss=0.1569, simple_loss=0.2463, pruned_loss=0.03378, over 754441.14 frames.], batch size: 25, lr: 4.38e-04 +2022-05-27 22:02:23,461 INFO [train.py:823] (1/4) Epoch 39, batch 200, loss[loss=0.1548, simple_loss=0.2392, pruned_loss=0.03524, over 7379.00 frames.], tot_loss[loss=0.157, simple_loss=0.2461, pruned_loss=0.03396, over 906605.73 frames.], batch size: 19, lr: 4.38e-04 +2022-05-27 22:03:03,284 INFO [train.py:823] (1/4) Epoch 39, batch 250, loss[loss=0.1443, simple_loss=0.2297, pruned_loss=0.02947, over 7292.00 frames.], tot_loss[loss=0.1575, simple_loss=0.2469, pruned_loss=0.03407, over 1021152.28 frames.], batch size: 19, lr: 4.37e-04 +2022-05-27 22:03:42,558 INFO [train.py:823] (1/4) Epoch 39, batch 300, loss[loss=0.1416, simple_loss=0.231, pruned_loss=0.0261, over 7295.00 frames.], tot_loss[loss=0.1581, simple_loss=0.2473, pruned_loss=0.03444, over 1113537.98 frames.], batch size: 19, lr: 4.37e-04 +2022-05-27 22:04:21,961 INFO [train.py:823] (1/4) Epoch 39, batch 350, loss[loss=0.1486, simple_loss=0.2407, pruned_loss=0.02821, over 7372.00 frames.], tot_loss[loss=0.1578, simple_loss=0.247, pruned_loss=0.03431, over 1184671.43 frames.], batch size: 20, lr: 4.37e-04 +2022-05-27 22:05:01,299 INFO [train.py:823] (1/4) Epoch 39, batch 400, loss[loss=0.145, simple_loss=0.2288, pruned_loss=0.03064, over 7026.00 frames.], tot_loss[loss=0.1584, simple_loss=0.2478, pruned_loss=0.03447, over 1241404.57 frames.], batch size: 17, lr: 4.36e-04 +2022-05-27 22:05:40,475 INFO [train.py:823] (1/4) Epoch 39, batch 450, loss[loss=0.1721, simple_loss=0.2675, pruned_loss=0.03832, over 6977.00 frames.], tot_loss[loss=0.1588, simple_loss=0.2483, pruned_loss=0.03461, over 1281067.28 frames.], batch size: 26, lr: 4.36e-04 +2022-05-27 22:06:19,076 INFO [train.py:823] (1/4) Epoch 39, batch 500, loss[loss=0.1568, simple_loss=0.2551, pruned_loss=0.02926, over 4449.00 frames.], tot_loss[loss=0.1587, simple_loss=0.2481, pruned_loss=0.03467, over 1310981.59 frames.], batch size: 47, lr: 4.36e-04 +2022-05-27 22:06:58,251 INFO [train.py:823] (1/4) Epoch 39, batch 550, loss[loss=0.167, simple_loss=0.2586, pruned_loss=0.03768, over 7230.00 frames.], tot_loss[loss=0.1588, simple_loss=0.2485, pruned_loss=0.0346, over 1332236.66 frames.], batch size: 25, lr: 4.36e-04 +2022-05-27 22:07:37,590 INFO [train.py:823] (1/4) Epoch 39, batch 600, loss[loss=0.1388, simple_loss=0.2262, pruned_loss=0.02572, over 7428.00 frames.], tot_loss[loss=0.159, simple_loss=0.2484, pruned_loss=0.03483, over 1355368.38 frames.], batch size: 18, lr: 4.35e-04 +2022-05-27 22:08:17,282 INFO [train.py:823] (1/4) Epoch 39, batch 650, loss[loss=0.1601, simple_loss=0.248, pruned_loss=0.03611, over 7403.00 frames.], tot_loss[loss=0.1586, simple_loss=0.248, pruned_loss=0.0346, over 1373969.17 frames.], batch size: 19, lr: 4.35e-04 +2022-05-27 22:08:55,705 INFO [train.py:823] (1/4) Epoch 39, batch 700, loss[loss=0.1619, simple_loss=0.254, pruned_loss=0.03491, over 7238.00 frames.], tot_loss[loss=0.158, simple_loss=0.2478, pruned_loss=0.0341, over 1384162.71 frames.], batch size: 24, lr: 4.35e-04 +2022-05-27 22:09:34,821 INFO [train.py:823] (1/4) Epoch 39, batch 750, loss[loss=0.1672, simple_loss=0.261, pruned_loss=0.03669, over 7384.00 frames.], tot_loss[loss=0.1576, simple_loss=0.2473, pruned_loss=0.034, over 1390589.14 frames.], batch size: 20, lr: 4.34e-04 +2022-05-27 22:10:14,115 INFO [train.py:823] (1/4) Epoch 39, batch 800, loss[loss=0.1424, 
simple_loss=0.2305, pruned_loss=0.02716, over 7197.00 frames.], tot_loss[loss=0.1585, simple_loss=0.2484, pruned_loss=0.03433, over 1399331.25 frames.], batch size: 18, lr: 4.34e-04 +2022-05-27 22:10:52,817 INFO [train.py:823] (1/4) Epoch 39, batch 850, loss[loss=0.1715, simple_loss=0.2734, pruned_loss=0.03474, over 7347.00 frames.], tot_loss[loss=0.1583, simple_loss=0.2482, pruned_loss=0.03423, over 1398598.51 frames.], batch size: 23, lr: 4.34e-04 +2022-05-27 22:11:31,869 INFO [train.py:823] (1/4) Epoch 39, batch 900, loss[loss=0.1683, simple_loss=0.2575, pruned_loss=0.03958, over 6877.00 frames.], tot_loss[loss=0.159, simple_loss=0.249, pruned_loss=0.03454, over 1390426.73 frames.], batch size: 29, lr: 4.34e-04 +2022-05-27 22:12:11,440 INFO [train.py:823] (1/4) Epoch 39, batch 950, loss[loss=0.1572, simple_loss=0.2552, pruned_loss=0.02957, over 4669.00 frames.], tot_loss[loss=0.1599, simple_loss=0.2495, pruned_loss=0.03515, over 1364677.42 frames.], batch size: 46, lr: 4.33e-04 +2022-05-27 22:12:24,466 INFO [train.py:823] (1/4) Epoch 40, batch 0, loss[loss=0.17, simple_loss=0.2615, pruned_loss=0.03923, over 7162.00 frames.], tot_loss[loss=0.17, simple_loss=0.2615, pruned_loss=0.03923, over 7162.00 frames.], batch size: 23, lr: 4.28e-04 +2022-05-27 22:13:03,073 INFO [train.py:823] (1/4) Epoch 40, batch 50, loss[loss=0.1485, simple_loss=0.2503, pruned_loss=0.02332, over 7113.00 frames.], tot_loss[loss=0.1593, simple_loss=0.2489, pruned_loss=0.03481, over 318055.80 frames.], batch size: 20, lr: 4.28e-04 +2022-05-27 22:13:43,941 INFO [train.py:823] (1/4) Epoch 40, batch 100, loss[loss=0.1575, simple_loss=0.2358, pruned_loss=0.03955, over 7209.00 frames.], tot_loss[loss=0.157, simple_loss=0.2476, pruned_loss=0.03319, over 558982.98 frames.], batch size: 16, lr: 4.27e-04 +2022-05-27 22:14:22,932 INFO [train.py:823] (1/4) Epoch 40, batch 150, loss[loss=0.1556, simple_loss=0.2466, pruned_loss=0.03227, over 7036.00 frames.], tot_loss[loss=0.1571, simple_loss=0.2476, pruned_loss=0.0333, over 746554.84 frames.], batch size: 29, lr: 4.27e-04 +2022-05-27 22:15:02,180 INFO [train.py:823] (1/4) Epoch 40, batch 200, loss[loss=0.1848, simple_loss=0.2718, pruned_loss=0.0489, over 7175.00 frames.], tot_loss[loss=0.1585, simple_loss=0.2482, pruned_loss=0.0344, over 897462.28 frames.], batch size: 21, lr: 4.27e-04 +2022-05-27 22:15:42,481 INFO [train.py:823] (1/4) Epoch 40, batch 250, loss[loss=0.1336, simple_loss=0.2151, pruned_loss=0.02603, over 6813.00 frames.], tot_loss[loss=0.1577, simple_loss=0.2477, pruned_loss=0.03383, over 1014289.79 frames.], batch size: 15, lr: 4.26e-04 +2022-05-27 22:16:22,013 INFO [train.py:823] (1/4) Epoch 40, batch 300, loss[loss=0.1429, simple_loss=0.2333, pruned_loss=0.02623, over 7373.00 frames.], tot_loss[loss=0.1579, simple_loss=0.2476, pruned_loss=0.03408, over 1106052.67 frames.], batch size: 20, lr: 4.26e-04 +2022-05-27 22:17:01,096 INFO [train.py:823] (1/4) Epoch 40, batch 350, loss[loss=0.1737, simple_loss=0.2627, pruned_loss=0.04241, over 6570.00 frames.], tot_loss[loss=0.1593, simple_loss=0.2489, pruned_loss=0.03482, over 1178629.17 frames.], batch size: 34, lr: 4.26e-04 +2022-05-27 22:17:42,245 INFO [train.py:823] (1/4) Epoch 40, batch 400, loss[loss=0.1535, simple_loss=0.2424, pruned_loss=0.03235, over 7017.00 frames.], tot_loss[loss=0.1582, simple_loss=0.2483, pruned_loss=0.03408, over 1237031.25 frames.], batch size: 16, lr: 4.26e-04 +2022-05-27 22:18:21,045 INFO [train.py:823] (1/4) Epoch 40, batch 450, loss[loss=0.1633, simple_loss=0.2425, 
pruned_loss=0.04203, over 6830.00 frames.], tot_loss[loss=0.1581, simple_loss=0.2484, pruned_loss=0.03394, over 1276407.07 frames.], batch size: 15, lr: 4.25e-04 +2022-05-27 22:19:00,406 INFO [train.py:823] (1/4) Epoch 40, batch 500, loss[loss=0.1494, simple_loss=0.2395, pruned_loss=0.02963, over 7369.00 frames.], tot_loss[loss=0.1585, simple_loss=0.249, pruned_loss=0.03401, over 1308795.41 frames.], batch size: 20, lr: 4.25e-04 +2022-05-27 22:19:39,643 INFO [train.py:823] (1/4) Epoch 40, batch 550, loss[loss=0.1739, simple_loss=0.2772, pruned_loss=0.03536, over 7299.00 frames.], tot_loss[loss=0.1578, simple_loss=0.248, pruned_loss=0.0338, over 1335526.06 frames.], batch size: 22, lr: 4.25e-04 +2022-05-27 22:20:18,680 INFO [train.py:823] (1/4) Epoch 40, batch 600, loss[loss=0.154, simple_loss=0.2495, pruned_loss=0.02925, over 7295.00 frames.], tot_loss[loss=0.1587, simple_loss=0.2488, pruned_loss=0.03434, over 1355005.50 frames.], batch size: 22, lr: 4.24e-04 +2022-05-27 22:20:57,853 INFO [train.py:823] (1/4) Epoch 40, batch 650, loss[loss=0.1503, simple_loss=0.2464, pruned_loss=0.02713, over 7202.00 frames.], tot_loss[loss=0.1588, simple_loss=0.249, pruned_loss=0.03432, over 1364554.94 frames.], batch size: 19, lr: 4.24e-04 +2022-05-27 22:21:37,009 INFO [train.py:823] (1/4) Epoch 40, batch 700, loss[loss=0.1515, simple_loss=0.2485, pruned_loss=0.02728, over 7194.00 frames.], tot_loss[loss=0.1601, simple_loss=0.2506, pruned_loss=0.03478, over 1377225.99 frames.], batch size: 20, lr: 4.24e-04 +2022-05-27 22:22:15,884 INFO [train.py:823] (1/4) Epoch 40, batch 750, loss[loss=0.1696, simple_loss=0.2506, pruned_loss=0.04433, over 4875.00 frames.], tot_loss[loss=0.1592, simple_loss=0.2496, pruned_loss=0.03442, over 1388358.01 frames.], batch size: 46, lr: 4.24e-04 +2022-05-27 22:22:55,261 INFO [train.py:823] (1/4) Epoch 40, batch 800, loss[loss=0.1591, simple_loss=0.2559, pruned_loss=0.03114, over 7182.00 frames.], tot_loss[loss=0.1591, simple_loss=0.2495, pruned_loss=0.03437, over 1388057.20 frames.], batch size: 21, lr: 4.23e-04 +2022-05-27 22:23:34,228 INFO [train.py:823] (1/4) Epoch 40, batch 850, loss[loss=0.1705, simple_loss=0.2625, pruned_loss=0.03922, over 7165.00 frames.], tot_loss[loss=0.1589, simple_loss=0.2492, pruned_loss=0.03427, over 1397074.35 frames.], batch size: 22, lr: 4.23e-04 +2022-05-27 22:24:12,876 INFO [train.py:823] (1/4) Epoch 40, batch 900, loss[loss=0.1534, simple_loss=0.2375, pruned_loss=0.03459, over 7377.00 frames.], tot_loss[loss=0.1581, simple_loss=0.2484, pruned_loss=0.03383, over 1390651.78 frames.], batch size: 20, lr: 4.23e-04 +2022-05-27 22:25:03,841 INFO [train.py:823] (1/4) Epoch 41, batch 0, loss[loss=0.1347, simple_loss=0.2192, pruned_loss=0.02512, over 7097.00 frames.], tot_loss[loss=0.1347, simple_loss=0.2192, pruned_loss=0.02512, over 7097.00 frames.], batch size: 19, lr: 4.17e-04 +2022-05-27 22:25:43,067 INFO [train.py:823] (1/4) Epoch 41, batch 50, loss[loss=0.1431, simple_loss=0.2405, pruned_loss=0.02284, over 7376.00 frames.], tot_loss[loss=0.1587, simple_loss=0.2478, pruned_loss=0.03482, over 322095.46 frames.], batch size: 20, lr: 4.17e-04 +2022-05-27 22:26:21,958 INFO [train.py:823] (1/4) Epoch 41, batch 100, loss[loss=0.1634, simple_loss=0.2486, pruned_loss=0.03904, over 7098.00 frames.], tot_loss[loss=0.1579, simple_loss=0.2481, pruned_loss=0.03381, over 562108.59 frames.], batch size: 18, lr: 4.17e-04 +2022-05-27 22:27:01,015 INFO [train.py:823] (1/4) Epoch 41, batch 150, loss[loss=0.1578, simple_loss=0.2482, pruned_loss=0.03366, over 
7000.00 frames.], tot_loss[loss=0.1565, simple_loss=0.2466, pruned_loss=0.03321, over 753817.47 frames.], batch size: 26, lr: 4.17e-04 +2022-05-27 22:27:40,088 INFO [train.py:823] (1/4) Epoch 41, batch 200, loss[loss=0.1689, simple_loss=0.2563, pruned_loss=0.04074, over 7387.00 frames.], tot_loss[loss=0.1581, simple_loss=0.2481, pruned_loss=0.03403, over 905283.15 frames.], batch size: 19, lr: 4.16e-04 +2022-05-27 22:28:19,427 INFO [train.py:823] (1/4) Epoch 41, batch 250, loss[loss=0.156, simple_loss=0.2446, pruned_loss=0.03367, over 7095.00 frames.], tot_loss[loss=0.1572, simple_loss=0.2469, pruned_loss=0.03373, over 1015987.10 frames.], batch size: 19, lr: 4.16e-04 +2022-05-27 22:28:57,989 INFO [train.py:823] (1/4) Epoch 41, batch 300, loss[loss=0.1533, simple_loss=0.2467, pruned_loss=0.02996, over 7370.00 frames.], tot_loss[loss=0.1569, simple_loss=0.2467, pruned_loss=0.03356, over 1106857.09 frames.], batch size: 20, lr: 4.16e-04 +2022-05-27 22:29:36,974 INFO [train.py:823] (1/4) Epoch 41, batch 350, loss[loss=0.1827, simple_loss=0.2739, pruned_loss=0.04573, over 7170.00 frames.], tot_loss[loss=0.1577, simple_loss=0.2477, pruned_loss=0.03387, over 1174558.20 frames.], batch size: 22, lr: 4.15e-04 +2022-05-27 22:30:15,339 INFO [train.py:823] (1/4) Epoch 41, batch 400, loss[loss=0.1667, simple_loss=0.256, pruned_loss=0.03865, over 7137.00 frames.], tot_loss[loss=0.1586, simple_loss=0.2483, pruned_loss=0.03442, over 1221431.42 frames.], batch size: 23, lr: 4.15e-04 +2022-05-27 22:30:54,562 INFO [train.py:823] (1/4) Epoch 41, batch 450, loss[loss=0.121, simple_loss=0.2133, pruned_loss=0.01434, over 7092.00 frames.], tot_loss[loss=0.1583, simple_loss=0.2479, pruned_loss=0.03432, over 1263073.01 frames.], batch size: 18, lr: 4.15e-04 +2022-05-27 22:31:33,815 INFO [train.py:823] (1/4) Epoch 41, batch 500, loss[loss=0.1544, simple_loss=0.25, pruned_loss=0.02939, over 7299.00 frames.], tot_loss[loss=0.1586, simple_loss=0.2485, pruned_loss=0.03432, over 1298689.82 frames.], batch size: 22, lr: 4.15e-04 +2022-05-27 22:32:12,767 INFO [train.py:823] (1/4) Epoch 41, batch 550, loss[loss=0.1666, simple_loss=0.2653, pruned_loss=0.03394, over 7200.00 frames.], tot_loss[loss=0.1569, simple_loss=0.2471, pruned_loss=0.03333, over 1322079.19 frames.], batch size: 19, lr: 4.14e-04 +2022-05-27 22:32:51,763 INFO [train.py:823] (1/4) Epoch 41, batch 600, loss[loss=0.2082, simple_loss=0.2941, pruned_loss=0.06112, over 7177.00 frames.], tot_loss[loss=0.1563, simple_loss=0.2467, pruned_loss=0.03297, over 1338804.37 frames.], batch size: 21, lr: 4.14e-04 +2022-05-27 22:33:31,340 INFO [train.py:823] (1/4) Epoch 41, batch 650, loss[loss=0.1604, simple_loss=0.2537, pruned_loss=0.03352, over 7179.00 frames.], tot_loss[loss=0.157, simple_loss=0.247, pruned_loss=0.03353, over 1357906.20 frames.], batch size: 21, lr: 4.14e-04 +2022-05-27 22:34:10,110 INFO [train.py:823] (1/4) Epoch 41, batch 700, loss[loss=0.1581, simple_loss=0.2372, pruned_loss=0.03948, over 7244.00 frames.], tot_loss[loss=0.1566, simple_loss=0.2466, pruned_loss=0.03334, over 1371326.07 frames.], batch size: 16, lr: 4.14e-04 +2022-05-27 22:34:50,533 INFO [train.py:823] (1/4) Epoch 41, batch 750, loss[loss=0.1346, simple_loss=0.2247, pruned_loss=0.02221, over 7187.00 frames.], tot_loss[loss=0.1563, simple_loss=0.2464, pruned_loss=0.03306, over 1380159.51 frames.], batch size: 18, lr: 4.13e-04 +2022-05-27 22:35:29,196 INFO [train.py:823] (1/4) Epoch 41, batch 800, loss[loss=0.1249, simple_loss=0.2071, pruned_loss=0.02135, over 7299.00 frames.], 
tot_loss[loss=0.1569, simple_loss=0.2468, pruned_loss=0.03347, over 1383144.82 frames.], batch size: 17, lr: 4.13e-04 +2022-05-27 22:36:09,505 INFO [train.py:823] (1/4) Epoch 41, batch 850, loss[loss=0.1195, simple_loss=0.2164, pruned_loss=0.01124, over 7305.00 frames.], tot_loss[loss=0.1573, simple_loss=0.2475, pruned_loss=0.03353, over 1395528.34 frames.], batch size: 19, lr: 4.13e-04 +2022-05-27 22:36:48,686 INFO [train.py:823] (1/4) Epoch 41, batch 900, loss[loss=0.1335, simple_loss=0.2162, pruned_loss=0.02539, over 7306.00 frames.], tot_loss[loss=0.1574, simple_loss=0.2474, pruned_loss=0.03374, over 1400822.57 frames.], batch size: 17, lr: 4.13e-04 +2022-05-27 22:37:42,326 INFO [train.py:823] (1/4) Epoch 42, batch 0, loss[loss=0.1698, simple_loss=0.2692, pruned_loss=0.03517, over 7288.00 frames.], tot_loss[loss=0.1698, simple_loss=0.2692, pruned_loss=0.03517, over 7288.00 frames.], batch size: 21, lr: 4.07e-04 +2022-05-27 22:38:21,706 INFO [train.py:823] (1/4) Epoch 42, batch 50, loss[loss=0.1753, simple_loss=0.2485, pruned_loss=0.05104, over 7388.00 frames.], tot_loss[loss=0.1553, simple_loss=0.2429, pruned_loss=0.03389, over 323520.26 frames.], batch size: 19, lr: 4.07e-04 +2022-05-27 22:39:02,288 INFO [train.py:823] (1/4) Epoch 42, batch 100, loss[loss=0.1364, simple_loss=0.2154, pruned_loss=0.02872, over 7233.00 frames.], tot_loss[loss=0.1534, simple_loss=0.2429, pruned_loss=0.03194, over 566332.69 frames.], batch size: 16, lr: 4.07e-04 +2022-05-27 22:39:41,236 INFO [train.py:823] (1/4) Epoch 42, batch 150, loss[loss=0.1866, simple_loss=0.266, pruned_loss=0.05362, over 7178.00 frames.], tot_loss[loss=0.1545, simple_loss=0.2439, pruned_loss=0.0325, over 755653.59 frames.], batch size: 22, lr: 4.07e-04 +2022-05-27 22:40:22,035 INFO [train.py:823] (1/4) Epoch 42, batch 200, loss[loss=0.1507, simple_loss=0.2472, pruned_loss=0.02711, over 7240.00 frames.], tot_loss[loss=0.1557, simple_loss=0.2455, pruned_loss=0.03296, over 901443.67 frames.], batch size: 24, lr: 4.06e-04 +2022-05-27 22:41:01,108 INFO [train.py:823] (1/4) Epoch 42, batch 250, loss[loss=0.1535, simple_loss=0.2321, pruned_loss=0.03746, over 7147.00 frames.], tot_loss[loss=0.1567, simple_loss=0.2464, pruned_loss=0.0335, over 1016831.45 frames.], batch size: 17, lr: 4.06e-04 +2022-05-27 22:41:40,208 INFO [train.py:823] (1/4) Epoch 42, batch 300, loss[loss=0.1509, simple_loss=0.2385, pruned_loss=0.03166, over 7187.00 frames.], tot_loss[loss=0.1571, simple_loss=0.2464, pruned_loss=0.03387, over 1101675.03 frames.], batch size: 21, lr: 4.06e-04 +2022-05-27 22:42:18,646 INFO [train.py:823] (1/4) Epoch 42, batch 350, loss[loss=0.1475, simple_loss=0.2297, pruned_loss=0.03261, over 7155.00 frames.], tot_loss[loss=0.157, simple_loss=0.2463, pruned_loss=0.03385, over 1168395.34 frames.], batch size: 17, lr: 4.06e-04 +2022-05-27 22:42:57,677 INFO [train.py:823] (1/4) Epoch 42, batch 400, loss[loss=0.1318, simple_loss=0.2177, pruned_loss=0.02297, over 7300.00 frames.], tot_loss[loss=0.157, simple_loss=0.2467, pruned_loss=0.03368, over 1219245.03 frames.], batch size: 17, lr: 4.05e-04 +2022-05-27 22:43:36,610 INFO [train.py:823] (1/4) Epoch 42, batch 450, loss[loss=0.1713, simple_loss=0.2735, pruned_loss=0.03452, over 7217.00 frames.], tot_loss[loss=0.1585, simple_loss=0.2483, pruned_loss=0.03432, over 1268708.68 frames.], batch size: 25, lr: 4.05e-04 +2022-05-27 22:44:16,159 INFO [train.py:823] (1/4) Epoch 42, batch 500, loss[loss=0.1444, simple_loss=0.2257, pruned_loss=0.03159, over 7139.00 frames.], tot_loss[loss=0.1569, 
simple_loss=0.2466, pruned_loss=0.03355, over 1303120.88 frames.], batch size: 17, lr: 4.05e-04 +2022-05-27 22:44:54,532 INFO [train.py:823] (1/4) Epoch 42, batch 550, loss[loss=0.1404, simple_loss=0.2266, pruned_loss=0.02708, over 7183.00 frames.], tot_loss[loss=0.1563, simple_loss=0.2463, pruned_loss=0.03318, over 1322749.41 frames.], batch size: 18, lr: 4.05e-04 +2022-05-27 22:45:33,818 INFO [train.py:823] (1/4) Epoch 42, batch 600, loss[loss=0.1504, simple_loss=0.2408, pruned_loss=0.02999, over 7193.00 frames.], tot_loss[loss=0.1567, simple_loss=0.2471, pruned_loss=0.03313, over 1344233.91 frames.], batch size: 20, lr: 4.04e-04 +2022-05-27 22:46:12,674 INFO [train.py:823] (1/4) Epoch 42, batch 650, loss[loss=0.163, simple_loss=0.2511, pruned_loss=0.03747, over 7137.00 frames.], tot_loss[loss=0.1572, simple_loss=0.2477, pruned_loss=0.03333, over 1364836.35 frames.], batch size: 23, lr: 4.04e-04 +2022-05-27 22:46:51,886 INFO [train.py:823] (1/4) Epoch 42, batch 700, loss[loss=0.1821, simple_loss=0.2772, pruned_loss=0.04351, over 6887.00 frames.], tot_loss[loss=0.1583, simple_loss=0.2487, pruned_loss=0.03392, over 1371644.82 frames.], batch size: 29, lr: 4.04e-04 +2022-05-27 22:47:31,149 INFO [train.py:823] (1/4) Epoch 42, batch 750, loss[loss=0.1769, simple_loss=0.2753, pruned_loss=0.03927, over 7382.00 frames.], tot_loss[loss=0.1577, simple_loss=0.248, pruned_loss=0.03367, over 1385723.78 frames.], batch size: 21, lr: 4.04e-04 +2022-05-27 22:48:10,574 INFO [train.py:823] (1/4) Epoch 42, batch 800, loss[loss=0.1693, simple_loss=0.2659, pruned_loss=0.03637, over 6569.00 frames.], tot_loss[loss=0.158, simple_loss=0.2481, pruned_loss=0.03396, over 1393700.88 frames.], batch size: 34, lr: 4.03e-04 +2022-05-27 22:48:49,650 INFO [train.py:823] (1/4) Epoch 42, batch 850, loss[loss=0.1307, simple_loss=0.2204, pruned_loss=0.02046, over 7013.00 frames.], tot_loss[loss=0.1573, simple_loss=0.2473, pruned_loss=0.03364, over 1400046.59 frames.], batch size: 17, lr: 4.03e-04 +2022-05-27 22:49:29,019 INFO [train.py:823] (1/4) Epoch 42, batch 900, loss[loss=0.174, simple_loss=0.2664, pruned_loss=0.04077, over 5349.00 frames.], tot_loss[loss=0.1561, simple_loss=0.2462, pruned_loss=0.03302, over 1398975.44 frames.], batch size: 46, lr: 4.03e-04 +2022-05-27 22:50:20,071 INFO [train.py:823] (1/4) Epoch 43, batch 0, loss[loss=0.144, simple_loss=0.2333, pruned_loss=0.02734, over 7289.00 frames.], tot_loss[loss=0.144, simple_loss=0.2333, pruned_loss=0.02734, over 7289.00 frames.], batch size: 19, lr: 3.98e-04 +2022-05-27 22:50:59,516 INFO [train.py:823] (1/4) Epoch 43, batch 50, loss[loss=0.141, simple_loss=0.2338, pruned_loss=0.02404, over 7372.00 frames.], tot_loss[loss=0.1565, simple_loss=0.2472, pruned_loss=0.03288, over 322642.04 frames.], batch size: 20, lr: 3.98e-04 +2022-05-27 22:51:38,797 INFO [train.py:823] (1/4) Epoch 43, batch 100, loss[loss=0.1485, simple_loss=0.2413, pruned_loss=0.02783, over 7150.00 frames.], tot_loss[loss=0.1551, simple_loss=0.2448, pruned_loss=0.03275, over 566809.77 frames.], batch size: 23, lr: 3.97e-04 +2022-05-27 22:52:22,686 INFO [train.py:823] (1/4) Epoch 43, batch 150, loss[loss=0.1471, simple_loss=0.2301, pruned_loss=0.03209, over 6547.00 frames.], tot_loss[loss=0.1566, simple_loss=0.2465, pruned_loss=0.03334, over 755954.79 frames.], batch size: 35, lr: 3.97e-04 +2022-05-27 22:53:01,476 INFO [train.py:823] (1/4) Epoch 43, batch 200, loss[loss=0.2121, simple_loss=0.2964, pruned_loss=0.06386, over 7340.00 frames.], tot_loss[loss=0.157, simple_loss=0.247, 
pruned_loss=0.03346, over 907141.05 frames.], batch size: 23, lr: 3.97e-04 +2022-05-27 22:53:40,827 INFO [train.py:823] (1/4) Epoch 43, batch 250, loss[loss=0.1524, simple_loss=0.2464, pruned_loss=0.02924, over 7308.00 frames.], tot_loss[loss=0.1563, simple_loss=0.2469, pruned_loss=0.03287, over 1022927.14 frames.], batch size: 18, lr: 3.97e-04 +2022-05-27 22:54:19,545 INFO [train.py:823] (1/4) Epoch 43, batch 300, loss[loss=0.1551, simple_loss=0.2492, pruned_loss=0.03048, over 7104.00 frames.], tot_loss[loss=0.1563, simple_loss=0.2463, pruned_loss=0.03309, over 1102784.68 frames.], batch size: 18, lr: 3.96e-04 +2022-05-27 22:54:59,037 INFO [train.py:823] (1/4) Epoch 43, batch 350, loss[loss=0.1683, simple_loss=0.256, pruned_loss=0.04034, over 7346.00 frames.], tot_loss[loss=0.1572, simple_loss=0.2472, pruned_loss=0.03356, over 1174949.81 frames.], batch size: 23, lr: 3.96e-04 +2022-05-27 22:55:37,588 INFO [train.py:823] (1/4) Epoch 43, batch 400, loss[loss=0.1533, simple_loss=0.2482, pruned_loss=0.02915, over 7193.00 frames.], tot_loss[loss=0.1569, simple_loss=0.2466, pruned_loss=0.03358, over 1229584.23 frames.], batch size: 20, lr: 3.96e-04 +2022-05-27 22:56:17,083 INFO [train.py:823] (1/4) Epoch 43, batch 450, loss[loss=0.152, simple_loss=0.2496, pruned_loss=0.02717, over 7184.00 frames.], tot_loss[loss=0.157, simple_loss=0.2471, pruned_loss=0.0335, over 1276097.72 frames.], batch size: 21, lr: 3.96e-04 +2022-05-27 22:56:56,286 INFO [train.py:823] (1/4) Epoch 43, batch 500, loss[loss=0.1469, simple_loss=0.2196, pruned_loss=0.0371, over 7419.00 frames.], tot_loss[loss=0.1565, simple_loss=0.2468, pruned_loss=0.0331, over 1307449.12 frames.], batch size: 18, lr: 3.95e-04 +2022-05-27 22:57:35,564 INFO [train.py:823] (1/4) Epoch 43, batch 550, loss[loss=0.1703, simple_loss=0.2698, pruned_loss=0.03539, over 7275.00 frames.], tot_loss[loss=0.157, simple_loss=0.2475, pruned_loss=0.03321, over 1337319.64 frames.], batch size: 21, lr: 3.95e-04 +2022-05-27 22:58:14,105 INFO [train.py:823] (1/4) Epoch 43, batch 600, loss[loss=0.1592, simple_loss=0.2584, pruned_loss=0.03, over 7165.00 frames.], tot_loss[loss=0.157, simple_loss=0.2475, pruned_loss=0.03326, over 1356896.61 frames.], batch size: 22, lr: 3.95e-04 +2022-05-27 22:58:54,270 INFO [train.py:823] (1/4) Epoch 43, batch 650, loss[loss=0.1499, simple_loss=0.2481, pruned_loss=0.0258, over 7196.00 frames.], tot_loss[loss=0.1562, simple_loss=0.2465, pruned_loss=0.03292, over 1374131.81 frames.], batch size: 20, lr: 3.95e-04 +2022-05-27 22:59:34,021 INFO [train.py:823] (1/4) Epoch 43, batch 700, loss[loss=0.1511, simple_loss=0.2306, pruned_loss=0.0358, over 7445.00 frames.], tot_loss[loss=0.1559, simple_loss=0.2464, pruned_loss=0.03269, over 1384505.59 frames.], batch size: 18, lr: 3.94e-04 +2022-05-27 23:00:13,134 INFO [train.py:823] (1/4) Epoch 43, batch 750, loss[loss=0.1507, simple_loss=0.2469, pruned_loss=0.02723, over 7184.00 frames.], tot_loss[loss=0.1559, simple_loss=0.2461, pruned_loss=0.03291, over 1394756.17 frames.], batch size: 21, lr: 3.94e-04 +2022-05-27 23:00:51,802 INFO [train.py:823] (1/4) Epoch 43, batch 800, loss[loss=0.1503, simple_loss=0.2462, pruned_loss=0.0272, over 7310.00 frames.], tot_loss[loss=0.156, simple_loss=0.2459, pruned_loss=0.03303, over 1403720.47 frames.], batch size: 22, lr: 3.94e-04 +2022-05-27 23:01:30,940 INFO [train.py:823] (1/4) Epoch 43, batch 850, loss[loss=0.1599, simple_loss=0.2527, pruned_loss=0.03355, over 7169.00 frames.], tot_loss[loss=0.1557, simple_loss=0.2457, pruned_loss=0.03283, over 
1405758.55 frames.], batch size: 22, lr: 3.94e-04 +2022-05-27 23:02:11,062 INFO [train.py:823] (1/4) Epoch 43, batch 900, loss[loss=0.1725, simple_loss=0.2457, pruned_loss=0.04961, over 7239.00 frames.], tot_loss[loss=0.1547, simple_loss=0.2448, pruned_loss=0.03235, over 1403241.52 frames.], batch size: 16, lr: 3.93e-04 +2022-05-27 23:03:00,542 INFO [train.py:823] (1/4) Epoch 44, batch 0, loss[loss=0.1571, simple_loss=0.2634, pruned_loss=0.02538, over 7312.00 frames.], tot_loss[loss=0.1571, simple_loss=0.2634, pruned_loss=0.02538, over 7312.00 frames.], batch size: 22, lr: 3.89e-04 +2022-05-27 23:03:41,207 INFO [train.py:823] (1/4) Epoch 44, batch 50, loss[loss=0.142, simple_loss=0.2331, pruned_loss=0.02544, over 7017.00 frames.], tot_loss[loss=0.1521, simple_loss=0.2411, pruned_loss=0.03161, over 321668.43 frames.], batch size: 17, lr: 3.89e-04 +2022-05-27 23:04:20,548 INFO [train.py:823] (1/4) Epoch 44, batch 100, loss[loss=0.1378, simple_loss=0.2333, pruned_loss=0.02117, over 7278.00 frames.], tot_loss[loss=0.153, simple_loss=0.2426, pruned_loss=0.03173, over 566331.57 frames.], batch size: 20, lr: 3.88e-04 +2022-05-27 23:04:59,694 INFO [train.py:823] (1/4) Epoch 44, batch 150, loss[loss=0.1544, simple_loss=0.2429, pruned_loss=0.03292, over 7283.00 frames.], tot_loss[loss=0.155, simple_loss=0.2451, pruned_loss=0.03241, over 757642.89 frames.], batch size: 20, lr: 3.88e-04 +2022-05-27 23:05:38,959 INFO [train.py:823] (1/4) Epoch 44, batch 200, loss[loss=0.1809, simple_loss=0.2682, pruned_loss=0.04684, over 7235.00 frames.], tot_loss[loss=0.1544, simple_loss=0.2445, pruned_loss=0.03216, over 903171.91 frames.], batch size: 24, lr: 3.88e-04 +2022-05-27 23:06:18,012 INFO [train.py:823] (1/4) Epoch 44, batch 250, loss[loss=0.1516, simple_loss=0.2359, pruned_loss=0.0337, over 7164.00 frames.], tot_loss[loss=0.1547, simple_loss=0.2447, pruned_loss=0.03235, over 1019795.76 frames.], batch size: 23, lr: 3.88e-04 +2022-05-27 23:06:56,884 INFO [train.py:823] (1/4) Epoch 44, batch 300, loss[loss=0.1804, simple_loss=0.2705, pruned_loss=0.04514, over 7275.00 frames.], tot_loss[loss=0.1552, simple_loss=0.2451, pruned_loss=0.03262, over 1107182.02 frames.], batch size: 21, lr: 3.87e-04 +2022-05-27 23:07:35,760 INFO [train.py:823] (1/4) Epoch 44, batch 350, loss[loss=0.1432, simple_loss=0.2186, pruned_loss=0.03394, over 7010.00 frames.], tot_loss[loss=0.1562, simple_loss=0.2462, pruned_loss=0.03313, over 1171211.46 frames.], batch size: 16, lr: 3.87e-04 +2022-05-27 23:08:14,786 INFO [train.py:823] (1/4) Epoch 44, batch 400, loss[loss=0.1543, simple_loss=0.2348, pruned_loss=0.03684, over 5042.00 frames.], tot_loss[loss=0.1561, simple_loss=0.2459, pruned_loss=0.03315, over 1222496.53 frames.], batch size: 47, lr: 3.87e-04 +2022-05-27 23:08:53,941 INFO [train.py:823] (1/4) Epoch 44, batch 450, loss[loss=0.1648, simple_loss=0.2522, pruned_loss=0.03866, over 7241.00 frames.], tot_loss[loss=0.1561, simple_loss=0.2459, pruned_loss=0.03315, over 1265652.20 frames.], batch size: 25, lr: 3.87e-04 +2022-05-27 23:09:33,426 INFO [train.py:823] (1/4) Epoch 44, batch 500, loss[loss=0.1793, simple_loss=0.2677, pruned_loss=0.04541, over 7147.00 frames.], tot_loss[loss=0.1561, simple_loss=0.2457, pruned_loss=0.03325, over 1302801.85 frames.], batch size: 17, lr: 3.86e-04 +2022-05-27 23:10:12,724 INFO [train.py:823] (1/4) Epoch 44, batch 550, loss[loss=0.1444, simple_loss=0.2358, pruned_loss=0.02645, over 7232.00 frames.], tot_loss[loss=0.1557, simple_loss=0.2457, pruned_loss=0.03282, over 1331330.62 frames.], batch size: 
24, lr: 3.86e-04 +2022-05-27 23:10:52,007 INFO [train.py:823] (1/4) Epoch 44, batch 600, loss[loss=0.148, simple_loss=0.2356, pruned_loss=0.03023, over 7399.00 frames.], tot_loss[loss=0.1554, simple_loss=0.2457, pruned_loss=0.03254, over 1353982.98 frames.], batch size: 19, lr: 3.86e-04 +2022-05-27 23:11:30,662 INFO [train.py:823] (1/4) Epoch 44, batch 650, loss[loss=0.1528, simple_loss=0.2426, pruned_loss=0.03152, over 7428.00 frames.], tot_loss[loss=0.1557, simple_loss=0.2462, pruned_loss=0.03263, over 1367465.91 frames.], batch size: 22, lr: 3.86e-04 +2022-05-27 23:12:09,871 INFO [train.py:823] (1/4) Epoch 44, batch 700, loss[loss=0.1704, simple_loss=0.2646, pruned_loss=0.03804, over 7126.00 frames.], tot_loss[loss=0.1555, simple_loss=0.2458, pruned_loss=0.03263, over 1379333.11 frames.], batch size: 23, lr: 3.85e-04 +2022-05-27 23:12:48,549 INFO [train.py:823] (1/4) Epoch 44, batch 750, loss[loss=0.1746, simple_loss=0.256, pruned_loss=0.0466, over 7164.00 frames.], tot_loss[loss=0.1561, simple_loss=0.2464, pruned_loss=0.03291, over 1390848.61 frames.], batch size: 17, lr: 3.85e-04 +2022-05-27 23:13:27,419 INFO [train.py:823] (1/4) Epoch 44, batch 800, loss[loss=0.1638, simple_loss=0.2688, pruned_loss=0.02939, over 7212.00 frames.], tot_loss[loss=0.156, simple_loss=0.2469, pruned_loss=0.03257, over 1397368.48 frames.], batch size: 25, lr: 3.85e-04 +2022-05-27 23:14:06,939 INFO [train.py:823] (1/4) Epoch 44, batch 850, loss[loss=0.1342, simple_loss=0.214, pruned_loss=0.02723, over 7233.00 frames.], tot_loss[loss=0.1557, simple_loss=0.2463, pruned_loss=0.03257, over 1403682.08 frames.], batch size: 16, lr: 3.85e-04 +2022-05-27 23:14:46,122 INFO [train.py:823] (1/4) Epoch 44, batch 900, loss[loss=0.1292, simple_loss=0.2119, pruned_loss=0.02325, over 7279.00 frames.], tot_loss[loss=0.156, simple_loss=0.2467, pruned_loss=0.03265, over 1401357.96 frames.], batch size: 17, lr: 3.85e-04 +2022-05-27 23:15:24,412 INFO [train.py:823] (1/4) Epoch 44, batch 950, loss[loss=0.1636, simple_loss=0.2518, pruned_loss=0.03773, over 4885.00 frames.], tot_loss[loss=0.1564, simple_loss=0.2465, pruned_loss=0.03314, over 1378094.23 frames.], batch size: 47, lr: 3.84e-04 +2022-05-27 23:15:37,762 INFO [train.py:823] (1/4) Epoch 45, batch 0, loss[loss=0.1319, simple_loss=0.227, pruned_loss=0.01837, over 7285.00 frames.], tot_loss[loss=0.1319, simple_loss=0.227, pruned_loss=0.01837, over 7285.00 frames.], batch size: 20, lr: 3.80e-04 +2022-05-27 23:16:17,141 INFO [train.py:823] (1/4) Epoch 45, batch 50, loss[loss=0.1526, simple_loss=0.2498, pruned_loss=0.0277, over 7286.00 frames.], tot_loss[loss=0.1513, simple_loss=0.2412, pruned_loss=0.03073, over 323982.53 frames.], batch size: 21, lr: 3.80e-04 +2022-05-27 23:16:56,290 INFO [train.py:823] (1/4) Epoch 45, batch 100, loss[loss=0.1768, simple_loss=0.2808, pruned_loss=0.0364, over 7377.00 frames.], tot_loss[loss=0.1536, simple_loss=0.2433, pruned_loss=0.0319, over 567486.50 frames.], batch size: 21, lr: 3.80e-04 +2022-05-27 23:17:35,591 INFO [train.py:823] (1/4) Epoch 45, batch 150, loss[loss=0.1335, simple_loss=0.2214, pruned_loss=0.02281, over 7235.00 frames.], tot_loss[loss=0.1543, simple_loss=0.2445, pruned_loss=0.0321, over 752825.57 frames.], batch size: 16, lr: 3.79e-04 +2022-05-27 23:18:14,555 INFO [train.py:823] (1/4) Epoch 45, batch 200, loss[loss=0.1538, simple_loss=0.2465, pruned_loss=0.03059, over 4973.00 frames.], tot_loss[loss=0.1545, simple_loss=0.2448, pruned_loss=0.03208, over 896746.72 frames.], batch size: 46, lr: 3.79e-04 +2022-05-27 
23:18:53,794 INFO [train.py:823] (1/4) Epoch 45, batch 250, loss[loss=0.1435, simple_loss=0.2451, pruned_loss=0.02091, over 6625.00 frames.], tot_loss[loss=0.1541, simple_loss=0.2447, pruned_loss=0.03171, over 1010643.81 frames.], batch size: 34, lr: 3.79e-04
+2022-05-27 23:19:32,694 INFO [train.py:823] (1/4) Epoch 45, batch 300, loss[loss=0.181, simple_loss=0.2792, pruned_loss=0.04139, over 7161.00 frames.], tot_loss[loss=0.1531, simple_loss=0.2435, pruned_loss=0.03134, over 1100293.54 frames.], batch size: 23, lr: 3.79e-04
+2022-05-27 23:20:11,909 INFO [train.py:823] (1/4) Epoch 45, batch 350, loss[loss=0.1582, simple_loss=0.2537, pruned_loss=0.03136, over 7418.00 frames.], tot_loss[loss=0.1542, simple_loss=0.2445, pruned_loss=0.03201, over 1173146.05 frames.], batch size: 22, lr: 3.78e-04
+2022-05-27 23:20:50,907 INFO [train.py:823] (1/4) Epoch 45, batch 400, loss[loss=0.1546, simple_loss=0.2455, pruned_loss=0.03188, over 7380.00 frames.], tot_loss[loss=0.1542, simple_loss=0.2444, pruned_loss=0.03202, over 1229890.76 frames.], batch size: 20, lr: 3.78e-04
+2022-05-27 23:21:30,379 INFO [train.py:823] (1/4) Epoch 45, batch 450, loss[loss=0.1377, simple_loss=0.2228, pruned_loss=0.02635, over 7186.00 frames.], tot_loss[loss=0.1543, simple_loss=0.2449, pruned_loss=0.03184, over 1270116.04 frames.], batch size: 18, lr: 3.78e-04
+2022-05-27 23:22:12,116 INFO [train.py:823] (1/4) Epoch 45, batch 500, loss[loss=0.1795, simple_loss=0.2722, pruned_loss=0.04339, over 7199.00 frames.], tot_loss[loss=0.1552, simple_loss=0.2454, pruned_loss=0.03244, over 1309238.86 frames.], batch size: 24, lr: 3.78e-04
+2022-05-27 23:22:51,883 INFO [train.py:823] (1/4) Epoch 45, batch 550, loss[loss=0.1419, simple_loss=0.225, pruned_loss=0.02945, over 7200.00 frames.], tot_loss[loss=0.1546, simple_loss=0.2447, pruned_loss=0.03228, over 1335073.26 frames.], batch size: 18, lr: 3.78e-04
+2022-05-27 23:23:31,257 INFO [train.py:823] (1/4) Epoch 45, batch 600, loss[loss=0.1481, simple_loss=0.246, pruned_loss=0.02512, over 6600.00 frames.], tot_loss[loss=0.1545, simple_loss=0.2446, pruned_loss=0.03221, over 1349151.85 frames.], batch size: 34, lr: 3.77e-04
+2022-05-27 23:24:10,233 INFO [train.py:823] (1/4) Epoch 45, batch 650, loss[loss=0.1684, simple_loss=0.2622, pruned_loss=0.03729, over 7160.00 frames.], tot_loss[loss=0.1545, simple_loss=0.2446, pruned_loss=0.0322, over 1364195.07 frames.], batch size: 23, lr: 3.77e-04
+2022-05-27 23:24:50,422 INFO [train.py:823] (1/4) Epoch 45, batch 700, loss[loss=0.16, simple_loss=0.2571, pruned_loss=0.03144, over 7301.00 frames.], tot_loss[loss=0.1548, simple_loss=0.2453, pruned_loss=0.03219, over 1378450.59 frames.], batch size: 22, lr: 3.77e-04
+2022-05-27 23:25:30,013 INFO [train.py:823] (1/4) Epoch 45, batch 750, loss[loss=0.1604, simple_loss=0.2496, pruned_loss=0.0356, over 6948.00 frames.], tot_loss[loss=0.1552, simple_loss=0.2458, pruned_loss=0.0323, over 1387524.66 frames.], batch size: 29, lr: 3.77e-04
+2022-05-27 23:26:08,779 INFO [train.py:823] (1/4) Epoch 45, batch 800, loss[loss=0.1626, simple_loss=0.244, pruned_loss=0.0406, over 7311.00 frames.], tot_loss[loss=0.1544, simple_loss=0.2452, pruned_loss=0.03181, over 1396725.74 frames.], batch size: 23, lr: 3.77e-04
+2022-05-27 23:26:49,333 INFO [train.py:823] (1/4) Epoch 45, batch 850, loss[loss=0.1595, simple_loss=0.2546, pruned_loss=0.03226, over 7193.00 frames.], tot_loss[loss=0.1557, simple_loss=0.2466, pruned_loss=0.03241, over 1398895.74 frames.], batch size: 21, lr: 3.76e-04
+2022-05-27 23:27:28,222 INFO [train.py:823] (1/4) Epoch 45, batch 900, loss[loss=0.1682, simple_loss=0.2458, pruned_loss=0.04531, over 7025.00 frames.], tot_loss[loss=0.1559, simple_loss=0.247, pruned_loss=0.03246, over 1401138.58 frames.], batch size: 17, lr: 3.76e-04
+2022-05-27 23:28:22,789 INFO [train.py:823] (1/4) Epoch 46, batch 0, loss[loss=0.1738, simple_loss=0.2608, pruned_loss=0.04333, over 7185.00 frames.], tot_loss[loss=0.1738, simple_loss=0.2608, pruned_loss=0.04333, over 7185.00 frames.], batch size: 22, lr: 3.72e-04
+2022-05-27 23:29:02,126 INFO [train.py:823] (1/4) Epoch 46, batch 50, loss[loss=0.1662, simple_loss=0.2637, pruned_loss=0.03429, over 7288.00 frames.], tot_loss[loss=0.1543, simple_loss=0.2441, pruned_loss=0.03231, over 314716.28 frames.], batch size: 20, lr: 3.72e-04
+2022-05-27 23:29:41,267 INFO [train.py:823] (1/4) Epoch 46, batch 100, loss[loss=0.1298, simple_loss=0.2113, pruned_loss=0.02415, over 7030.00 frames.], tot_loss[loss=0.1521, simple_loss=0.2419, pruned_loss=0.03114, over 560957.85 frames.], batch size: 16, lr: 3.71e-04
+2022-05-27 23:30:20,231 INFO [train.py:823] (1/4) Epoch 46, batch 150, loss[loss=0.1573, simple_loss=0.2462, pruned_loss=0.03418, over 7118.00 frames.], tot_loss[loss=0.1536, simple_loss=0.2433, pruned_loss=0.03201, over 753275.21 frames.], batch size: 20, lr: 3.71e-04
+2022-05-27 23:30:59,847 INFO [train.py:823] (1/4) Epoch 46, batch 200, loss[loss=0.1872, simple_loss=0.2664, pruned_loss=0.05399, over 7336.00 frames.], tot_loss[loss=0.1527, simple_loss=0.242, pruned_loss=0.03173, over 906327.93 frames.], batch size: 23, lr: 3.71e-04
+2022-05-27 23:31:39,103 INFO [train.py:823] (1/4) Epoch 46, batch 250, loss[loss=0.1581, simple_loss=0.2541, pruned_loss=0.03106, over 7143.00 frames.], tot_loss[loss=0.1539, simple_loss=0.2434, pruned_loss=0.0322, over 1020815.40 frames.], batch size: 23, lr: 3.71e-04
+2022-05-27 23:32:18,143 INFO [train.py:823] (1/4) Epoch 46, batch 300, loss[loss=0.1762, simple_loss=0.2698, pruned_loss=0.04129, over 6961.00 frames.], tot_loss[loss=0.1547, simple_loss=0.2443, pruned_loss=0.03256, over 1108086.23 frames.], batch size: 29, lr: 3.70e-04
+2022-05-27 23:32:56,765 INFO [train.py:823] (1/4) Epoch 46, batch 350, loss[loss=0.175, simple_loss=0.2807, pruned_loss=0.03462, over 6582.00 frames.], tot_loss[loss=0.1554, simple_loss=0.2456, pruned_loss=0.03254, over 1179688.11 frames.], batch size: 34, lr: 3.70e-04
+2022-05-27 23:33:36,299 INFO [train.py:823] (1/4) Epoch 46, batch 400, loss[loss=0.152, simple_loss=0.2455, pruned_loss=0.02924, over 7151.00 frames.], tot_loss[loss=0.1549, simple_loss=0.2457, pruned_loss=0.03201, over 1236093.76 frames.], batch size: 23, lr: 3.70e-04
+2022-05-27 23:34:15,508 INFO [train.py:823] (1/4) Epoch 46, batch 450, loss[loss=0.1634, simple_loss=0.2605, pruned_loss=0.03309, over 7280.00 frames.], tot_loss[loss=0.1545, simple_loss=0.2452, pruned_loss=0.03193, over 1279186.35 frames.], batch size: 20, lr: 3.70e-04
+2022-05-27 23:34:54,735 INFO [train.py:823] (1/4) Epoch 46, batch 500, loss[loss=0.1376, simple_loss=0.2153, pruned_loss=0.02995, over 6777.00 frames.], tot_loss[loss=0.1541, simple_loss=0.245, pruned_loss=0.03163, over 1303467.77 frames.], batch size: 15, lr: 3.70e-04
+2022-05-27 23:35:33,971 INFO [train.py:823] (1/4) Epoch 46, batch 550, loss[loss=0.1383, simple_loss=0.2358, pruned_loss=0.02042, over 7306.00 frames.], tot_loss[loss=0.1538, simple_loss=0.2447, pruned_loss=0.03143, over 1333433.13 frames.], batch size: 22, lr: 3.69e-04
+2022-05-27 23:36:13,160 INFO [train.py:823] (1/4) Epoch 46, batch 600, loss[loss=0.1519, simple_loss=0.2335, pruned_loss=0.03518, over 7019.00 frames.], tot_loss[loss=0.1533, simple_loss=0.2443, pruned_loss=0.03111, over 1352147.72 frames.], batch size: 17, lr: 3.69e-04
+2022-05-27 23:36:52,193 INFO [train.py:823] (1/4) Epoch 46, batch 650, loss[loss=0.1528, simple_loss=0.2478, pruned_loss=0.02894, over 7154.00 frames.], tot_loss[loss=0.1528, simple_loss=0.2439, pruned_loss=0.03089, over 1366911.66 frames.], batch size: 23, lr: 3.69e-04
+2022-05-27 23:37:31,616 INFO [train.py:823] (1/4) Epoch 46, batch 700, loss[loss=0.145, simple_loss=0.228, pruned_loss=0.03103, over 7151.00 frames.], tot_loss[loss=0.1526, simple_loss=0.2432, pruned_loss=0.03104, over 1374845.29 frames.], batch size: 17, lr: 3.69e-04
+2022-05-27 23:38:10,487 INFO [train.py:823] (1/4) Epoch 46, batch 750, loss[loss=0.1663, simple_loss=0.2668, pruned_loss=0.03292, over 6525.00 frames.], tot_loss[loss=0.1531, simple_loss=0.2435, pruned_loss=0.03136, over 1383027.24 frames.], batch size: 34, lr: 3.69e-04
+2022-05-27 23:38:49,155 INFO [train.py:823] (1/4) Epoch 46, batch 800, loss[loss=0.1412, simple_loss=0.2337, pruned_loss=0.02435, over 7204.00 frames.], tot_loss[loss=0.153, simple_loss=0.2433, pruned_loss=0.0313, over 1387163.99 frames.], batch size: 20, lr: 3.68e-04
+2022-05-27 23:39:28,292 INFO [train.py:823] (1/4) Epoch 46, batch 850, loss[loss=0.1676, simple_loss=0.2627, pruned_loss=0.03625, over 7320.00 frames.], tot_loss[loss=0.1528, simple_loss=0.2431, pruned_loss=0.0313, over 1389384.65 frames.], batch size: 23, lr: 3.68e-04
+2022-05-27 23:40:07,668 INFO [train.py:823] (1/4) Epoch 46, batch 900, loss[loss=0.1431, simple_loss=0.2345, pruned_loss=0.02589, over 7102.00 frames.], tot_loss[loss=0.1539, simple_loss=0.2442, pruned_loss=0.03184, over 1396865.00 frames.], batch size: 18, lr: 3.68e-04
+2022-05-27 23:41:01,399 INFO [train.py:823] (1/4) Epoch 47, batch 0, loss[loss=0.1338, simple_loss=0.2134, pruned_loss=0.02705, over 7017.00 frames.], tot_loss[loss=0.1338, simple_loss=0.2134, pruned_loss=0.02705, over 7017.00 frames.], batch size: 16, lr: 3.64e-04
+2022-05-27 23:41:40,300 INFO [train.py:823] (1/4) Epoch 47, batch 50, loss[loss=0.152, simple_loss=0.2306, pruned_loss=0.0367, over 7313.00 frames.], tot_loss[loss=0.1506, simple_loss=0.24, pruned_loss=0.03059, over 321812.20 frames.], batch size: 17, lr: 3.64e-04
+2022-05-27 23:42:19,469 INFO [train.py:823] (1/4) Epoch 47, batch 100, loss[loss=0.1422, simple_loss=0.2251, pruned_loss=0.02966, over 7314.00 frames.], tot_loss[loss=0.1508, simple_loss=0.2396, pruned_loss=0.03101, over 565189.48 frames.], batch size: 18, lr: 3.63e-04
+2022-05-27 23:42:58,951 INFO [train.py:823] (1/4) Epoch 47, batch 150, loss[loss=0.1766, simple_loss=0.2713, pruned_loss=0.04095, over 7294.00 frames.], tot_loss[loss=0.1523, simple_loss=0.2415, pruned_loss=0.03158, over 757365.55 frames.], batch size: 22, lr: 3.63e-04
+2022-05-27 23:43:37,738 INFO [train.py:823] (1/4) Epoch 47, batch 200, loss[loss=0.188, simple_loss=0.2622, pruned_loss=0.05691, over 7080.00 frames.], tot_loss[loss=0.1526, simple_loss=0.2421, pruned_loss=0.03155, over 901232.00 frames.], batch size: 18, lr: 3.63e-04
+2022-05-27 23:44:17,085 INFO [train.py:823] (1/4) Epoch 47, batch 250, loss[loss=0.1565, simple_loss=0.2498, pruned_loss=0.03159, over 7395.00 frames.], tot_loss[loss=0.1532, simple_loss=0.243, pruned_loss=0.03167, over 1022264.01 frames.], batch size: 19, lr: 3.63e-04
+2022-05-27 23:44:56,260 INFO [train.py:823] (1/4) Epoch 47, batch 300, loss[loss=0.1486, simple_loss=0.2384, pruned_loss=0.02939, over 7187.00 frames.], tot_loss[loss=0.1535, simple_loss=0.2438, pruned_loss=0.03159, over 1111707.96 frames.], batch size: 18, lr: 3.63e-04
+2022-05-27 23:45:37,067 INFO [train.py:823] (1/4) Epoch 47, batch 350, loss[loss=0.1579, simple_loss=0.2448, pruned_loss=0.0355, over 7287.00 frames.], tot_loss[loss=0.1532, simple_loss=0.2435, pruned_loss=0.03143, over 1179185.09 frames.], batch size: 20, lr: 3.62e-04
+2022-05-27 23:46:17,195 INFO [train.py:823] (1/4) Epoch 47, batch 400, loss[loss=0.1447, simple_loss=0.2369, pruned_loss=0.02622, over 7278.00 frames.], tot_loss[loss=0.1542, simple_loss=0.245, pruned_loss=0.03171, over 1233472.43 frames.], batch size: 20, lr: 3.62e-04
+2022-05-27 23:46:56,159 INFO [train.py:823] (1/4) Epoch 47, batch 450, loss[loss=0.1258, simple_loss=0.2113, pruned_loss=0.02018, over 7162.00 frames.], tot_loss[loss=0.1538, simple_loss=0.2441, pruned_loss=0.03172, over 1274457.72 frames.], batch size: 17, lr: 3.62e-04
+2022-05-27 23:47:36,406 INFO [train.py:823] (1/4) Epoch 47, batch 500, loss[loss=0.136, simple_loss=0.2278, pruned_loss=0.02209, over 7095.00 frames.], tot_loss[loss=0.1536, simple_loss=0.244, pruned_loss=0.03155, over 1302667.20 frames.], batch size: 19, lr: 3.62e-04
+2022-05-27 23:48:15,796 INFO [train.py:823] (1/4) Epoch 47, batch 550, loss[loss=0.1513, simple_loss=0.2362, pruned_loss=0.03322, over 7386.00 frames.], tot_loss[loss=0.1534, simple_loss=0.2435, pruned_loss=0.03163, over 1328874.85 frames.], batch size: 19, lr: 3.62e-04
+2022-05-27 23:48:54,648 INFO [train.py:823] (1/4) Epoch 47, batch 600, loss[loss=0.1825, simple_loss=0.2779, pruned_loss=0.0435, over 6981.00 frames.], tot_loss[loss=0.1534, simple_loss=0.2437, pruned_loss=0.03153, over 1347395.69 frames.], batch size: 26, lr: 3.61e-04
+2022-05-27 23:49:34,978 INFO [train.py:823] (1/4) Epoch 47, batch 650, loss[loss=0.1466, simple_loss=0.2252, pruned_loss=0.03405, over 7286.00 frames.], tot_loss[loss=0.1539, simple_loss=0.2443, pruned_loss=0.03174, over 1364860.68 frames.], batch size: 17, lr: 3.61e-04
+2022-05-27 23:50:14,038 INFO [train.py:823] (1/4) Epoch 47, batch 700, loss[loss=0.1963, simple_loss=0.2954, pruned_loss=0.0486, over 7350.00 frames.], tot_loss[loss=0.1544, simple_loss=0.2446, pruned_loss=0.03215, over 1372291.43 frames.], batch size: 23, lr: 3.61e-04
+2022-05-27 23:50:53,638 INFO [train.py:823] (1/4) Epoch 47, batch 750, loss[loss=0.1167, simple_loss=0.1999, pruned_loss=0.01674, over 7300.00 frames.], tot_loss[loss=0.1538, simple_loss=0.2439, pruned_loss=0.0318, over 1384196.74 frames.], batch size: 19, lr: 3.61e-04
+2022-05-27 23:51:32,289 INFO [train.py:823] (1/4) Epoch 47, batch 800, loss[loss=0.1522, simple_loss=0.2503, pruned_loss=0.0271, over 6965.00 frames.], tot_loss[loss=0.153, simple_loss=0.2434, pruned_loss=0.03129, over 1391061.70 frames.], batch size: 26, lr: 3.61e-04
+2022-05-27 23:52:11,462 INFO [train.py:823] (1/4) Epoch 47, batch 850, loss[loss=0.1303, simple_loss=0.2152, pruned_loss=0.02276, over 7195.00 frames.], tot_loss[loss=0.153, simple_loss=0.2433, pruned_loss=0.03135, over 1392009.81 frames.], batch size: 18, lr: 3.60e-04
+2022-05-27 23:52:50,574 INFO [train.py:823] (1/4) Epoch 47, batch 900, loss[loss=0.1612, simple_loss=0.2564, pruned_loss=0.033, over 7307.00 frames.], tot_loss[loss=0.1532, simple_loss=0.2435, pruned_loss=0.03142, over 1397453.59 frames.], batch size: 22, lr: 3.60e-04
+2022-05-27 23:53:43,368 INFO [train.py:823] (1/4) Epoch 48, batch 0, loss[loss=0.1728, simple_loss=0.2502, pruned_loss=0.04772, over 7190.00 frames.], tot_loss[loss=0.1728, simple_loss=0.2502, pruned_loss=0.04772, over 7190.00 frames.], batch size: 21, lr: 3.56e-04
+2022-05-27 23:54:22,612 INFO [train.py:823] (1/4) Epoch 48, batch 50, loss[loss=0.1259, simple_loss=0.2079, pruned_loss=0.02192, over 7143.00 frames.], tot_loss[loss=0.1535, simple_loss=0.2435, pruned_loss=0.0318, over 320071.33 frames.], batch size: 17, lr: 3.56e-04
+2022-05-27 23:55:01,965 INFO [train.py:823] (1/4) Epoch 48, batch 100, loss[loss=0.1705, simple_loss=0.2743, pruned_loss=0.03338, over 7223.00 frames.], tot_loss[loss=0.1543, simple_loss=0.2443, pruned_loss=0.03211, over 565858.60 frames.], batch size: 25, lr: 3.56e-04
+2022-05-27 23:55:41,023 INFO [train.py:823] (1/4) Epoch 48, batch 150, loss[loss=0.1386, simple_loss=0.2208, pruned_loss=0.02817, over 7294.00 frames.], tot_loss[loss=0.1558, simple_loss=0.2459, pruned_loss=0.03289, over 760494.79 frames.], batch size: 17, lr: 3.56e-04
+2022-05-27 23:56:20,136 INFO [train.py:823] (1/4) Epoch 48, batch 200, loss[loss=0.1813, simple_loss=0.2787, pruned_loss=0.04196, over 7308.00 frames.], tot_loss[loss=0.1544, simple_loss=0.2443, pruned_loss=0.03225, over 906640.64 frames.], batch size: 22, lr: 3.55e-04
+2022-05-27 23:56:59,319 INFO [train.py:823] (1/4) Epoch 48, batch 250, loss[loss=0.1528, simple_loss=0.2427, pruned_loss=0.03144, over 7207.00 frames.], tot_loss[loss=0.1538, simple_loss=0.2435, pruned_loss=0.03203, over 1023203.23 frames.], batch size: 19, lr: 3.55e-04
+2022-05-27 23:57:38,895 INFO [train.py:823] (1/4) Epoch 48, batch 300, loss[loss=0.1551, simple_loss=0.251, pruned_loss=0.02957, over 6970.00 frames.], tot_loss[loss=0.1528, simple_loss=0.2425, pruned_loss=0.03159, over 1115887.08 frames.], batch size: 26, lr: 3.55e-04
+2022-05-27 23:58:17,868 INFO [train.py:823] (1/4) Epoch 48, batch 350, loss[loss=0.1664, simple_loss=0.2551, pruned_loss=0.03885, over 4832.00 frames.], tot_loss[loss=0.1527, simple_loss=0.2425, pruned_loss=0.03149, over 1182417.81 frames.], batch size: 47, lr: 3.55e-04
+2022-05-27 23:58:57,367 INFO [train.py:823] (1/4) Epoch 48, batch 400, loss[loss=0.1414, simple_loss=0.238, pruned_loss=0.02241, over 6479.00 frames.], tot_loss[loss=0.1525, simple_loss=0.2422, pruned_loss=0.03146, over 1238762.48 frames.], batch size: 34, lr: 3.55e-04
+2022-05-27 23:59:36,496 INFO [train.py:823] (1/4) Epoch 48, batch 450, loss[loss=0.1523, simple_loss=0.2395, pruned_loss=0.03255, over 7295.00 frames.], tot_loss[loss=0.1534, simple_loss=0.2432, pruned_loss=0.03182, over 1280787.90 frames.], batch size: 17, lr: 3.54e-04
+2022-05-28 00:00:15,793 INFO [train.py:823] (1/4) Epoch 48, batch 500, loss[loss=0.1578, simple_loss=0.2598, pruned_loss=0.02789, over 7197.00 frames.], tot_loss[loss=0.1542, simple_loss=0.2442, pruned_loss=0.0321, over 1311108.41 frames.], batch size: 20, lr: 3.54e-04
+2022-05-28 00:00:54,490 INFO [train.py:823] (1/4) Epoch 48, batch 550, loss[loss=0.1611, simple_loss=0.2587, pruned_loss=0.03173, over 7427.00 frames.], tot_loss[loss=0.1538, simple_loss=0.2442, pruned_loss=0.03174, over 1331642.82 frames.], batch size: 22, lr: 3.54e-04
+2022-05-28 00:01:33,243 INFO [train.py:823] (1/4) Epoch 48, batch 600, loss[loss=0.1491, simple_loss=0.239, pruned_loss=0.02962, over 7282.00 frames.], tot_loss[loss=0.1544, simple_loss=0.245, pruned_loss=0.03186, over 1350619.39 frames.], batch size: 20, lr: 3.54e-04
+2022-05-28 00:02:11,707 INFO [train.py:823] (1/4) Epoch 48, batch 650, loss[loss=0.1428, simple_loss=0.2331, pruned_loss=0.02623, over 7381.00 frames.], tot_loss[loss=0.1537, simple_loss=0.2444, pruned_loss=0.0315, over 1364839.86 frames.], batch size: 21, lr: 3.54e-04
+2022-05-28 00:02:51,183 INFO [train.py:823] (1/4) Epoch 48, batch 700, loss[loss=0.1666, simple_loss=0.2528, pruned_loss=0.04018, over 7180.00 frames.], tot_loss[loss=0.1535, simple_loss=0.2446, pruned_loss=0.03116, over 1372019.24 frames.], batch size: 22, lr: 3.53e-04
+2022-05-28 00:03:30,287 INFO [train.py:823] (1/4) Epoch 48, batch 750, loss[loss=0.1641, simple_loss=0.2463, pruned_loss=0.04098, over 7107.00 frames.], tot_loss[loss=0.1531, simple_loss=0.244, pruned_loss=0.03113, over 1383798.57 frames.], batch size: 19, lr: 3.53e-04
+2022-05-28 00:04:09,537 INFO [train.py:823] (1/4) Epoch 48, batch 800, loss[loss=0.1587, simple_loss=0.2567, pruned_loss=0.03033, over 7349.00 frames.], tot_loss[loss=0.1527, simple_loss=0.2436, pruned_loss=0.0309, over 1391335.08 frames.], batch size: 23, lr: 3.53e-04
+2022-05-28 00:04:48,529 INFO [train.py:823] (1/4) Epoch 48, batch 850, loss[loss=0.1524, simple_loss=0.2393, pruned_loss=0.03279, over 7293.00 frames.], tot_loss[loss=0.153, simple_loss=0.2434, pruned_loss=0.03127, over 1391709.23 frames.], batch size: 17, lr: 3.53e-04
+2022-05-28 00:05:27,345 INFO [train.py:823] (1/4) Epoch 48, batch 900, loss[loss=0.1534, simple_loss=0.2439, pruned_loss=0.03139, over 7290.00 frames.], tot_loss[loss=0.153, simple_loss=0.2435, pruned_loss=0.03125, over 1395355.89 frames.], batch size: 19, lr: 3.53e-04
+2022-05-28 00:06:18,116 INFO [train.py:823] (1/4) Epoch 49, batch 0, loss[loss=0.1517, simple_loss=0.2464, pruned_loss=0.02846, over 7378.00 frames.], tot_loss[loss=0.1517, simple_loss=0.2464, pruned_loss=0.02846, over 7378.00 frames.], batch size: 20, lr: 3.49e-04
+2022-05-28 00:06:57,230 INFO [train.py:823] (1/4) Epoch 49, batch 50, loss[loss=0.1414, simple_loss=0.2384, pruned_loss=0.02221, over 7276.00 frames.], tot_loss[loss=0.1528, simple_loss=0.244, pruned_loss=0.03078, over 318735.81 frames.], batch size: 21, lr: 3.49e-04
+2022-05-28 00:07:37,568 INFO [train.py:823] (1/4) Epoch 49, batch 100, loss[loss=0.1612, simple_loss=0.2386, pruned_loss=0.04189, over 7181.00 frames.], tot_loss[loss=0.1531, simple_loss=0.2435, pruned_loss=0.03135, over 560671.69 frames.], batch size: 18, lr: 3.48e-04
+2022-05-28 00:08:16,663 INFO [train.py:823] (1/4) Epoch 49, batch 150, loss[loss=0.1434, simple_loss=0.2282, pruned_loss=0.02936, over 4704.00 frames.], tot_loss[loss=0.1513, simple_loss=0.2416, pruned_loss=0.03054, over 751079.39 frames.], batch size: 47, lr: 3.48e-04
+2022-05-28 00:08:56,077 INFO [train.py:823] (1/4) Epoch 49, batch 200, loss[loss=0.1598, simple_loss=0.2503, pruned_loss=0.03467, over 7125.00 frames.], tot_loss[loss=0.1512, simple_loss=0.241, pruned_loss=0.03071, over 901388.92 frames.], batch size: 23, lr: 3.48e-04
+2022-05-28 00:09:37,921 INFO [train.py:823] (1/4) Epoch 49, batch 250, loss[loss=0.1598, simple_loss=0.2451, pruned_loss=0.03727, over 7200.00 frames.], tot_loss[loss=0.1521, simple_loss=0.2425, pruned_loss=0.03088, over 1021745.64 frames.], batch size: 20, lr: 3.48e-04
+2022-05-28 00:10:16,990 INFO [train.py:823] (1/4) Epoch 49, batch 300, loss[loss=0.1565, simple_loss=0.2387, pruned_loss=0.03716, over 7304.00 frames.], tot_loss[loss=0.1516, simple_loss=0.2422, pruned_loss=0.03056, over 1113795.49 frames.], batch size: 18, lr: 3.48e-04
+2022-05-28 00:10:56,112 INFO [train.py:823] (1/4) Epoch 49, batch 350, loss[loss=0.1635, simple_loss=0.2611, pruned_loss=0.03292, over 7202.00 frames.], tot_loss[loss=0.1517, simple_loss=0.2423, pruned_loss=0.03054, over 1176804.46 frames.], batch size: 25, lr: 3.48e-04
+2022-05-28 00:11:35,208 INFO [train.py:823] (1/4) Epoch 49, batch 400, loss[loss=0.1307, simple_loss=0.2063, pruned_loss=0.02753, over 7008.00 frames.], tot_loss[loss=0.1522, simple_loss=0.2424, pruned_loss=0.03097, over 1228102.94 frames.], batch size: 16, lr: 3.47e-04
+2022-05-28 00:12:14,615 INFO [train.py:823] (1/4) Epoch 49, batch 450, loss[loss=0.1708, simple_loss=0.271, pruned_loss=0.03526, over 7242.00 frames.], tot_loss[loss=0.1523, simple_loss=0.2424, pruned_loss=0.03109, over 1273703.01 frames.], batch size: 24, lr: 3.47e-04
+2022-05-28 00:12:54,457 INFO [train.py:823] (1/4) Epoch 49, batch 500, loss[loss=0.1505, simple_loss=0.2414, pruned_loss=0.02979, over 6654.00 frames.], tot_loss[loss=0.1521, simple_loss=0.2421, pruned_loss=0.03107, over 1305687.91 frames.], batch size: 34, lr: 3.47e-04
+2022-05-28 00:13:33,848 INFO [train.py:823] (1/4) Epoch 49, batch 550, loss[loss=0.1661, simple_loss=0.2454, pruned_loss=0.04338, over 7291.00 frames.], tot_loss[loss=0.1516, simple_loss=0.2413, pruned_loss=0.03092, over 1332660.35 frames.], batch size: 17, lr: 3.47e-04
+2022-05-28 00:14:12,718 INFO [train.py:823] (1/4) Epoch 49, batch 600, loss[loss=0.1549, simple_loss=0.2344, pruned_loss=0.03773, over 7227.00 frames.], tot_loss[loss=0.1518, simple_loss=0.2416, pruned_loss=0.031, over 1351102.12 frames.], batch size: 24, lr: 3.47e-04
+2022-05-28 00:14:52,408 INFO [train.py:823] (1/4) Epoch 49, batch 650, loss[loss=0.1393, simple_loss=0.2253, pruned_loss=0.02664, over 7150.00 frames.], tot_loss[loss=0.1514, simple_loss=0.2414, pruned_loss=0.0307, over 1367542.19 frames.], batch size: 17, lr: 3.46e-04
+2022-05-28 00:15:31,429 INFO [train.py:823] (1/4) Epoch 49, batch 700, loss[loss=0.1415, simple_loss=0.2411, pruned_loss=0.021, over 7414.00 frames.], tot_loss[loss=0.1525, simple_loss=0.2427, pruned_loss=0.03121, over 1370981.96 frames.], batch size: 22, lr: 3.46e-04
+2022-05-28 00:16:10,997 INFO [train.py:823] (1/4) Epoch 49, batch 750, loss[loss=0.1481, simple_loss=0.2275, pruned_loss=0.03433, over 7290.00 frames.], tot_loss[loss=0.1528, simple_loss=0.2432, pruned_loss=0.0312, over 1381036.69 frames.], batch size: 19, lr: 3.46e-04
+2022-05-28 00:16:49,937 INFO [train.py:823] (1/4) Epoch 49, batch 800, loss[loss=0.1347, simple_loss=0.2141, pruned_loss=0.0276, over 7143.00 frames.], tot_loss[loss=0.1528, simple_loss=0.2433, pruned_loss=0.03111, over 1385316.65 frames.], batch size: 17, lr: 3.46e-04
+2022-05-28 00:17:29,771 INFO [train.py:823] (1/4) Epoch 49, batch 850, loss[loss=0.1287, simple_loss=0.2186, pruned_loss=0.01936, over 7099.00 frames.], tot_loss[loss=0.1518, simple_loss=0.2422, pruned_loss=0.03071, over 1391380.87 frames.], batch size: 18, lr: 3.46e-04
+2022-05-28 00:18:08,747 INFO [train.py:823] (1/4) Epoch 49, batch 900, loss[loss=0.1566, simple_loss=0.2494, pruned_loss=0.03187, over 6437.00 frames.], tot_loss[loss=0.1519, simple_loss=0.2425, pruned_loss=0.0307, over 1393757.99 frames.], batch size: 34, lr: 3.45e-04
+2022-05-28 00:19:00,802 INFO [train.py:823] (1/4) Epoch 50, batch 0, loss[loss=0.1835, simple_loss=0.2747, pruned_loss=0.0461, over 7102.00 frames.], tot_loss[loss=0.1835, simple_loss=0.2747, pruned_loss=0.0461, over 7102.00 frames.], batch size: 29, lr: 3.42e-04
+2022-05-28 00:19:39,996 INFO [train.py:823] (1/4) Epoch 50, batch 50, loss[loss=0.1454, simple_loss=0.2484, pruned_loss=0.02117, over 7278.00 frames.], tot_loss[loss=0.1507, simple_loss=0.24, pruned_loss=0.03068, over 322576.71 frames.], batch size: 20, lr: 3.42e-04
+2022-05-28 00:20:19,159 INFO [train.py:823] (1/4) Epoch 50, batch 100, loss[loss=0.1674, simple_loss=0.2562, pruned_loss=0.03928, over 7165.00 frames.], tot_loss[loss=0.1499, simple_loss=0.2401, pruned_loss=0.02985, over 563946.34 frames.], batch size: 23, lr: 3.41e-04
+2022-05-28 00:20:58,259 INFO [train.py:823] (1/4) Epoch 50, batch 150, loss[loss=0.1442, simple_loss=0.2379, pruned_loss=0.02524, over 7381.00 frames.], tot_loss[loss=0.1523, simple_loss=0.2431, pruned_loss=0.03074, over 752567.33 frames.], batch size: 21, lr: 3.41e-04
+2022-05-28 00:21:37,459 INFO [train.py:823] (1/4) Epoch 50, batch 200, loss[loss=0.1516, simple_loss=0.2456, pruned_loss=0.02881, over 7096.00 frames.], tot_loss[loss=0.1509, simple_loss=0.2417, pruned_loss=0.03007, over 901796.41 frames.], batch size: 18, lr: 3.41e-04
+2022-05-28 00:22:16,557 INFO [train.py:823] (1/4) Epoch 50, batch 250, loss[loss=0.1577, simple_loss=0.246, pruned_loss=0.03473, over 7167.00 frames.], tot_loss[loss=0.1501, simple_loss=0.2404, pruned_loss=0.02994, over 1020177.41 frames.], batch size: 22, lr: 3.41e-04
+2022-05-28 00:22:55,490 INFO [train.py:823] (1/4) Epoch 50, batch 300, loss[loss=0.142, simple_loss=0.2425, pruned_loss=0.02078, over 7195.00 frames.], tot_loss[loss=0.1522, simple_loss=0.2426, pruned_loss=0.03091, over 1110377.88 frames.], batch size: 20, lr: 3.41e-04
+2022-05-28 00:23:34,703 INFO [train.py:823] (1/4) Epoch 50, batch 350, loss[loss=0.1555, simple_loss=0.245, pruned_loss=0.03305, over 7417.00 frames.], tot_loss[loss=0.1528, simple_loss=0.2429, pruned_loss=0.03133, over 1178994.45 frames.], batch size: 22, lr: 3.41e-04
+2022-05-28 00:24:13,715 INFO [train.py:823] (1/4) Epoch 50, batch 400, loss[loss=0.1615, simple_loss=0.2549, pruned_loss=0.03405, over 7029.00 frames.], tot_loss[loss=0.1525, simple_loss=0.2425, pruned_loss=0.03124, over 1233042.73 frames.], batch size: 26, lr: 3.40e-04
+2022-05-28 00:24:52,163 INFO [train.py:823] (1/4) Epoch 50, batch 450, loss[loss=0.1468, simple_loss=0.2329, pruned_loss=0.03035, over 6378.00 frames.], tot_loss[loss=0.1523, simple_loss=0.2426, pruned_loss=0.03097, over 1273840.30 frames.], batch size: 34, lr: 3.40e-04
+2022-05-28 00:25:31,598 INFO [train.py:823] (1/4) Epoch 50, batch 500, loss[loss=0.1433, simple_loss=0.2296, pruned_loss=0.02845, over 7286.00 frames.], tot_loss[loss=0.1527, simple_loss=0.2432, pruned_loss=0.03115, over 1307166.63 frames.], batch size: 19, lr: 3.40e-04
+2022-05-28 00:26:10,824 INFO [train.py:823] (1/4) Epoch 50, batch 550, loss[loss=0.1591, simple_loss=0.2488, pruned_loss=0.03472, over 7232.00 frames.], tot_loss[loss=0.1524, simple_loss=0.2425, pruned_loss=0.03109, over 1334747.11 frames.], batch size: 24, lr: 3.40e-04
+2022-05-28 00:26:49,676 INFO [train.py:823] (1/4) Epoch 50, batch 600, loss[loss=0.1463, simple_loss=0.2295, pruned_loss=0.03159, over 7027.00 frames.], tot_loss[loss=0.1526, simple_loss=0.2427, pruned_loss=0.03128, over 1352591.42 frames.], batch size: 16, lr: 3.40e-04
+2022-05-28 00:27:28,474 INFO [train.py:823] (1/4) Epoch 50, batch 650, loss[loss=0.1385, simple_loss=0.2236, pruned_loss=0.0267, over 7009.00 frames.], tot_loss[loss=0.1531, simple_loss=0.2435, pruned_loss=0.03137, over 1363437.61 frames.], batch size: 16, lr: 3.39e-04
+2022-05-28 00:28:07,682 INFO [train.py:823] (1/4) Epoch 50, batch 700, loss[loss=0.1369, simple_loss=0.2202, pruned_loss=0.02685, over 7018.00 frames.], tot_loss[loss=0.1523, simple_loss=0.2425, pruned_loss=0.03111, over 1375529.33 frames.], batch size: 16, lr: 3.39e-04
+2022-05-28 00:28:46,793 INFO [train.py:823] (1/4) Epoch 50, batch 750, loss[loss=0.1466, simple_loss=0.2417, pruned_loss=0.02572, over 7309.00 frames.], tot_loss[loss=0.1522, simple_loss=0.2425, pruned_loss=0.03098, over 1383246.21 frames.], batch size: 22, lr: 3.39e-04
+2022-05-28 00:29:26,154 INFO [train.py:823] (1/4) Epoch 50, batch 800, loss[loss=0.1465, simple_loss=0.2251, pruned_loss=0.03396, over 7092.00 frames.], tot_loss[loss=0.1522, simple_loss=0.2422, pruned_loss=0.0311, over 1391478.30 frames.], batch size: 19, lr: 3.39e-04
+2022-05-28 00:30:05,639 INFO [train.py:823] (1/4) Epoch 50, batch 850, loss[loss=0.1805, simple_loss=0.2612, pruned_loss=0.04985, over 4434.00 frames.], tot_loss[loss=0.1524, simple_loss=0.2427, pruned_loss=0.0311, over 1396447.64 frames.], batch size: 46, lr: 3.39e-04
+2022-05-28 00:30:45,789 INFO [train.py:823] (1/4) Epoch 50, batch 900, loss[loss=0.1407, simple_loss=0.2336, pruned_loss=0.02388, over 6405.00 frames.], tot_loss[loss=0.1528, simple_loss=0.243, pruned_loss=0.03125, over 1399129.83 frames.], batch size: 34, lr: 3.39e-04
+2022-05-28 00:31:24,921 INFO [train.py:1038] (1/4) Done!