diff --git "a/baseline/log/log-train-2022-05-27-13-57-22-2" "b/baseline/log/log-train-2022-05-27-13-57-22-2" new file mode 100644--- /dev/null +++ "b/baseline/log/log-train-2022-05-27-13-57-22-2" @@ -0,0 +1,982 @@ +2022-05-27 13:57:22,684 INFO [train.py:887] (2/4) Training started +2022-05-27 13:57:22,684 INFO [train.py:897] (2/4) Device: cuda:2 +2022-05-27 13:57:22,687 INFO [train.py:906] (2/4) {'best_train_loss': inf, 'best_valid_loss': inf, 'best_train_epoch': -1, 'best_valid_epoch': -1, 'batch_idx_train': 0, 'log_interval': 50, 'reset_interval': 200, 'valid_interval': 1600, 'feature_dim': 80, 'subsampling_factor': 4, 'encoder_dim': 512, 'nhead': 8, 'dim_feedforward': 2048, 'num_encoder_layers': 12, 'decoder_dim': 512, 'joiner_dim': 512, 'model_warm_step': 3000, 'env_info': {'k2-version': '1.13', 'k2-build-type': 'Release', 'k2-with-cuda': True, 'k2-git-sha1': 'f4fefe4882bc0ae59af951da3f47335d5495ef71', 'k2-git-date': 'Thu Feb 10 15:16:02 2022', 'lhotse-version': '1.1.0', 'torch-version': '1.10.0+cu102', 'torch-cuda-available': True, 'torch-cuda-version': '10.2', 'python-version': '3.8', 'icefall-git-branch': 'stateless6', 'icefall-git-sha1': '50641cd-dirty', 'icefall-git-date': 'Fri May 27 13:49:39 2022', 'icefall-path': '/ceph-data2/ly/open_source/vq2_icefall', 'k2-path': '/ceph-jb/yaozengwei/workspace/rnnt/k2/k2/python/k2/__init__.py', 'lhotse-path': '/ceph-ly/open-source/hubert/lhotse/lhotse/__init__.py', 'hostname': 'de-74279-k2-train-9-0425111216-65f66bdf4-bkrql', 'IP address': '10.177.77.9'}, 'enable_distiallation': False, 'distillation_layer': 5, 'num_codebooks': 16, 'world_size': 4, 'master_port': 12359, 'tensorboard': True, 'num_epochs': 50, 'start_epoch': 1, 'start_batch': 0, 'exp_dir': PosixPath('pruned_transducer_stateless6/exp'), 'bpe_model': 'data/lang_bpe_500/bpe.model', 'initial_lr': 0.003, 'lr_batches': 5000, 'lr_epochs': 6, 'context_size': 2, 'prune_range': 5, 'lm_scale': 0.25, 'am_scale': 0.0, 'simple_loss_scale': 0.5, 'codebook_loss_scale': 0.1, 'seed': 42, 'print_diagnostics': False, 'save_every_n': 8000, 'keep_last_k': 20, 'average_period': 100, 'use_fp16': False, 'full_libri': False, 'manifest_dir': PosixPath('data/vq_fbank'), 'max_duration': 300, 'bucketing_sampler': True, 'num_buckets': 30, 'concatenate_cuts': False, 'duration_factor': 1.0, 'gap': 1.0, 'on_the_fly_feats': False, 'shuffle': True, 'drop_last': True, 'return_cuts': True, 'num_workers': 2, 'enable_spec_aug': True, 'spec_aug_time_warp_factor': -1, 'enable_musan': True, 'input_strategy': 'PrecomputedFeatures', 'blank_id': 0, 'vocab_size': 500} +2022-05-27 13:57:22,687 INFO [train.py:908] (2/4) About to create model +2022-05-27 13:57:23,178 INFO [train.py:912] (2/4) Number of model parameters: 78648040 +2022-05-27 13:57:29,091 INFO [train.py:927] (2/4) Using DDP +2022-05-27 13:57:29,321 INFO [asr_datamodule.py:408] (2/4) About to get train-clean-100 cuts +2022-05-27 13:57:38,254 INFO [asr_datamodule.py:225] (2/4) Enable MUSAN +2022-05-27 13:57:38,255 INFO [asr_datamodule.py:226] (2/4) About to get Musan cuts +2022-05-27 13:57:41,847 INFO [asr_datamodule.py:254] (2/4) Enable SpecAugment +2022-05-27 13:57:41,847 INFO [asr_datamodule.py:255] (2/4) Time warp factor: -1 +2022-05-27 13:57:41,847 INFO [asr_datamodule.py:267] (2/4) Num frame mask: 10 +2022-05-27 13:57:41,848 INFO [asr_datamodule.py:280] (2/4) About to create train dataset +2022-05-27 13:57:41,848 INFO [asr_datamodule.py:309] (2/4) Using BucketingSampler. 
+2022-05-27 13:57:42,201 INFO [asr_datamodule.py:325] (2/4) About to create train dataloader +2022-05-27 13:57:42,203 INFO [asr_datamodule.py:429] (2/4) About to get dev-clean cuts +2022-05-27 13:57:42,378 INFO [asr_datamodule.py:434] (2/4) About to get dev-other cuts +2022-05-27 13:57:42,524 INFO [asr_datamodule.py:356] (2/4) About to create dev dataset +2022-05-27 13:57:42,539 INFO [asr_datamodule.py:375] (2/4) About to create dev dataloader +2022-05-27 13:57:42,540 INFO [train.py:1054] (2/4) Sanity check -- see if any of the batches in epoch 1 would cause OOM. +2022-05-27 13:57:47,525 INFO [distributed.py:874] (2/4) Reducer buckets have been rebuilt in this iteration. +2022-05-27 13:58:01,578 INFO [train.py:823] (2/4) Epoch 1, batch 0, loss[loss=0.9516, simple_loss=1.903, pruned_loss=6.901, over 7281.00 frames.], tot_loss[loss=0.9516, simple_loss=1.903, pruned_loss=6.901, over 7281.00 frames.], batch size: 21, lr: 3.00e-03 +2022-05-27 13:58:40,746 INFO [train.py:823] (2/4) Epoch 1, batch 50, loss[loss=0.5083, simple_loss=1.017, pruned_loss=7.015, over 7148.00 frames.], tot_loss[loss=0.5715, simple_loss=1.143, pruned_loss=7.151, over 322117.15 frames.], batch size: 23, lr: 3.00e-03 +2022-05-27 13:59:20,100 INFO [train.py:823] (2/4) Epoch 1, batch 100, loss[loss=0.443, simple_loss=0.886, pruned_loss=6.936, over 7198.00 frames.], tot_loss[loss=0.5134, simple_loss=1.027, pruned_loss=7.07, over 564350.66 frames.], batch size: 20, lr: 3.00e-03 +2022-05-27 13:59:59,662 INFO [train.py:823] (2/4) Epoch 1, batch 150, loss[loss=0.4464, simple_loss=0.8928, pruned_loss=6.844, over 7335.00 frames.], tot_loss[loss=0.4793, simple_loss=0.9586, pruned_loss=6.979, over 754765.62 frames.], batch size: 23, lr: 3.00e-03 +2022-05-27 14:00:39,086 INFO [train.py:823] (2/4) Epoch 1, batch 200, loss[loss=0.3989, simple_loss=0.7978, pruned_loss=6.668, over 7289.00 frames.], tot_loss[loss=0.4584, simple_loss=0.9169, pruned_loss=6.906, over 904582.73 frames.], batch size: 19, lr: 3.00e-03 +2022-05-27 14:01:18,222 INFO [train.py:823] (2/4) Epoch 1, batch 250, loss[loss=0.3637, simple_loss=0.7275, pruned_loss=6.501, over 7283.00 frames.], tot_loss[loss=0.4426, simple_loss=0.8853, pruned_loss=6.836, over 1014628.52 frames.], batch size: 17, lr: 3.00e-03 +2022-05-27 14:01:57,504 INFO [train.py:823] (2/4) Epoch 1, batch 300, loss[loss=0.4135, simple_loss=0.827, pruned_loss=6.724, over 7216.00 frames.], tot_loss[loss=0.4301, simple_loss=0.8603, pruned_loss=6.784, over 1106272.13 frames.], batch size: 24, lr: 3.00e-03 +2022-05-27 14:02:36,750 INFO [train.py:823] (2/4) Epoch 1, batch 350, loss[loss=0.4002, simple_loss=0.8003, pruned_loss=6.617, over 6459.00 frames.], tot_loss[loss=0.4196, simple_loss=0.8391, pruned_loss=6.747, over 1177244.22 frames.], batch size: 34, lr: 3.00e-03 +2022-05-27 14:03:16,163 INFO [train.py:823] (2/4) Epoch 1, batch 400, loss[loss=0.3793, simple_loss=0.7585, pruned_loss=6.58, over 4812.00 frames.], tot_loss[loss=0.41, simple_loss=0.8199, pruned_loss=6.721, over 1228059.48 frames.], batch size: 46, lr: 3.00e-03 +2022-05-27 14:03:55,422 INFO [train.py:823] (2/4) Epoch 1, batch 450, loss[loss=0.3515, simple_loss=0.703, pruned_loss=6.595, over 7205.00 frames.], tot_loss[loss=0.3973, simple_loss=0.7947, pruned_loss=6.704, over 1274115.10 frames.], batch size: 19, lr: 2.99e-03 +2022-05-27 14:04:34,570 INFO [train.py:823] (2/4) Epoch 1, batch 500, loss[loss=0.2958, simple_loss=0.5916, pruned_loss=6.545, over 7389.00 frames.], tot_loss[loss=0.3829, simple_loss=0.7657, pruned_loss=6.698, over 1308811.12 
frames.], batch size: 19, lr: 2.99e-03 +2022-05-27 14:05:13,695 INFO [train.py:823] (2/4) Epoch 1, batch 550, loss[loss=0.3018, simple_loss=0.6037, pruned_loss=6.753, over 7221.00 frames.], tot_loss[loss=0.3653, simple_loss=0.7306, pruned_loss=6.692, over 1329725.40 frames.], batch size: 25, lr: 2.99e-03 +2022-05-27 14:05:53,154 INFO [train.py:823] (2/4) Epoch 1, batch 600, loss[loss=0.2646, simple_loss=0.5292, pruned_loss=6.608, over 7280.00 frames.], tot_loss[loss=0.3476, simple_loss=0.6952, pruned_loss=6.686, over 1346577.44 frames.], batch size: 19, lr: 2.99e-03 +2022-05-27 14:06:31,945 INFO [train.py:823] (2/4) Epoch 1, batch 650, loss[loss=0.2671, simple_loss=0.5342, pruned_loss=6.723, over 7099.00 frames.], tot_loss[loss=0.3333, simple_loss=0.6666, pruned_loss=6.688, over 1361440.64 frames.], batch size: 19, lr: 2.99e-03 +2022-05-27 14:07:11,428 INFO [train.py:823] (2/4) Epoch 1, batch 700, loss[loss=0.2352, simple_loss=0.4705, pruned_loss=6.551, over 7167.00 frames.], tot_loss[loss=0.3175, simple_loss=0.6349, pruned_loss=6.686, over 1373624.75 frames.], batch size: 17, lr: 2.99e-03 +2022-05-27 14:07:50,431 INFO [train.py:823] (2/4) Epoch 1, batch 750, loss[loss=0.2262, simple_loss=0.4524, pruned_loss=6.589, over 7224.00 frames.], tot_loss[loss=0.3035, simple_loss=0.6069, pruned_loss=6.689, over 1386861.15 frames.], batch size: 16, lr: 2.98e-03 +2022-05-27 14:08:29,953 INFO [train.py:823] (2/4) Epoch 1, batch 800, loss[loss=0.261, simple_loss=0.522, pruned_loss=6.798, over 7142.00 frames.], tot_loss[loss=0.2916, simple_loss=0.5831, pruned_loss=6.694, over 1392645.66 frames.], batch size: 23, lr: 2.98e-03 +2022-05-27 14:09:08,887 INFO [train.py:823] (2/4) Epoch 1, batch 850, loss[loss=0.209, simple_loss=0.418, pruned_loss=6.594, over 7028.00 frames.], tot_loss[loss=0.2821, simple_loss=0.5642, pruned_loss=6.702, over 1400094.73 frames.], batch size: 16, lr: 2.98e-03 +2022-05-27 14:09:47,695 INFO [train.py:823] (2/4) Epoch 1, batch 900, loss[loss=0.2085, simple_loss=0.417, pruned_loss=6.651, over 7298.00 frames.], tot_loss[loss=0.2723, simple_loss=0.5446, pruned_loss=6.709, over 1403223.21 frames.], batch size: 17, lr: 2.98e-03 +2022-05-27 14:10:41,071 INFO [train.py:823] (2/4) Epoch 2, batch 0, loss[loss=0.2376, simple_loss=0.4753, pruned_loss=6.809, over 7096.00 frames.], tot_loss[loss=0.2376, simple_loss=0.4753, pruned_loss=6.809, over 7096.00 frames.], batch size: 19, lr: 2.95e-03 +2022-05-27 14:11:20,537 INFO [train.py:823] (2/4) Epoch 2, batch 50, loss[loss=0.2454, simple_loss=0.4909, pruned_loss=6.836, over 7375.00 frames.], tot_loss[loss=0.2273, simple_loss=0.4545, pruned_loss=6.722, over 322100.00 frames.], batch size: 21, lr: 2.95e-03 +2022-05-27 14:12:00,013 INFO [train.py:823] (2/4) Epoch 2, batch 100, loss[loss=0.2393, simple_loss=0.4786, pruned_loss=6.771, over 7014.00 frames.], tot_loss[loss=0.2268, simple_loss=0.4536, pruned_loss=6.733, over 564870.97 frames.], batch size: 26, lr: 2.95e-03 +2022-05-27 14:12:39,364 INFO [train.py:823] (2/4) Epoch 2, batch 150, loss[loss=0.1964, simple_loss=0.3928, pruned_loss=6.74, over 7297.00 frames.], tot_loss[loss=0.2246, simple_loss=0.4491, pruned_loss=6.737, over 758264.95 frames.], batch size: 17, lr: 2.94e-03 +2022-05-27 14:13:18,840 INFO [train.py:823] (2/4) Epoch 2, batch 200, loss[loss=0.1957, simple_loss=0.3914, pruned_loss=6.704, over 7092.00 frames.], tot_loss[loss=0.2215, simple_loss=0.443, pruned_loss=6.741, over 906121.55 frames.], batch size: 18, lr: 2.94e-03 +2022-05-27 14:13:58,021 INFO [train.py:823] (2/4) Epoch 2, 
batch 250, loss[loss=0.2182, simple_loss=0.4364, pruned_loss=6.607, over 7168.00 frames.], tot_loss[loss=0.2196, simple_loss=0.4391, pruned_loss=6.736, over 1016975.65 frames.], batch size: 17, lr: 2.94e-03 +2022-05-27 14:14:37,498 INFO [train.py:823] (2/4) Epoch 2, batch 300, loss[loss=0.1794, simple_loss=0.3589, pruned_loss=6.62, over 7030.00 frames.], tot_loss[loss=0.2173, simple_loss=0.4345, pruned_loss=6.739, over 1108385.62 frames.], batch size: 16, lr: 2.93e-03 +2022-05-27 14:15:20,539 INFO [train.py:823] (2/4) Epoch 2, batch 350, loss[loss=0.2019, simple_loss=0.4038, pruned_loss=6.76, over 7137.00 frames.], tot_loss[loss=0.2158, simple_loss=0.4315, pruned_loss=6.742, over 1175723.80 frames.], batch size: 23, lr: 2.93e-03 +2022-05-27 14:15:59,778 INFO [train.py:823] (2/4) Epoch 2, batch 400, loss[loss=0.2138, simple_loss=0.4275, pruned_loss=6.819, over 7098.00 frames.], tot_loss[loss=0.2144, simple_loss=0.4287, pruned_loss=6.743, over 1226632.41 frames.], batch size: 18, lr: 2.93e-03 +2022-05-27 14:16:38,936 INFO [train.py:823] (2/4) Epoch 2, batch 450, loss[loss=0.2107, simple_loss=0.4214, pruned_loss=6.825, over 7290.00 frames.], tot_loss[loss=0.2121, simple_loss=0.4242, pruned_loss=6.75, over 1267549.91 frames.], batch size: 21, lr: 2.92e-03 +2022-05-27 14:17:18,221 INFO [train.py:823] (2/4) Epoch 2, batch 500, loss[loss=0.2355, simple_loss=0.4709, pruned_loss=6.863, over 6883.00 frames.], tot_loss[loss=0.2112, simple_loss=0.4225, pruned_loss=6.756, over 1303748.04 frames.], batch size: 29, lr: 2.92e-03 +2022-05-27 14:17:57,125 INFO [train.py:823] (2/4) Epoch 2, batch 550, loss[loss=0.2028, simple_loss=0.4057, pruned_loss=6.764, over 5104.00 frames.], tot_loss[loss=0.2097, simple_loss=0.4194, pruned_loss=6.757, over 1325316.18 frames.], batch size: 47, lr: 2.92e-03 +2022-05-27 14:18:36,799 INFO [train.py:823] (2/4) Epoch 2, batch 600, loss[loss=0.2384, simple_loss=0.4768, pruned_loss=6.9, over 7273.00 frames.], tot_loss[loss=0.2078, simple_loss=0.4156, pruned_loss=6.757, over 1341943.69 frames.], batch size: 21, lr: 2.91e-03 +2022-05-27 14:19:16,326 INFO [train.py:823] (2/4) Epoch 2, batch 650, loss[loss=0.199, simple_loss=0.3979, pruned_loss=6.72, over 7293.00 frames.], tot_loss[loss=0.2068, simple_loss=0.4136, pruned_loss=6.762, over 1359775.74 frames.], batch size: 22, lr: 2.91e-03 +2022-05-27 14:19:56,895 INFO [train.py:823] (2/4) Epoch 2, batch 700, loss[loss=0.1705, simple_loss=0.341, pruned_loss=6.661, over 7426.00 frames.], tot_loss[loss=0.2043, simple_loss=0.4087, pruned_loss=6.762, over 1375548.09 frames.], batch size: 18, lr: 2.90e-03 +2022-05-27 14:20:36,654 INFO [train.py:823] (2/4) Epoch 2, batch 750, loss[loss=0.2134, simple_loss=0.4267, pruned_loss=6.893, over 7108.00 frames.], tot_loss[loss=0.2024, simple_loss=0.4048, pruned_loss=6.763, over 1382393.39 frames.], batch size: 20, lr: 2.90e-03 +2022-05-27 14:21:16,567 INFO [train.py:823] (2/4) Epoch 2, batch 800, loss[loss=0.2302, simple_loss=0.4604, pruned_loss=6.858, over 5144.00 frames.], tot_loss[loss=0.2012, simple_loss=0.4024, pruned_loss=6.765, over 1387867.65 frames.], batch size: 48, lr: 2.89e-03 +2022-05-27 14:21:57,230 INFO [train.py:823] (2/4) Epoch 2, batch 850, loss[loss=0.1924, simple_loss=0.3848, pruned_loss=6.687, over 7186.00 frames.], tot_loss[loss=0.2003, simple_loss=0.4005, pruned_loss=6.767, over 1392033.16 frames.], batch size: 20, lr: 2.89e-03 +2022-05-27 14:22:36,474 INFO [train.py:823] (2/4) Epoch 2, batch 900, loss[loss=0.1723, simple_loss=0.3446, pruned_loss=6.71, over 7305.00 frames.], 
tot_loss[loss=0.1995, simple_loss=0.3989, pruned_loss=6.771, over 1396184.10 frames.], batch size: 18, lr: 2.89e-03 +2022-05-27 14:23:29,985 INFO [train.py:823] (2/4) Epoch 3, batch 0, loss[loss=0.1663, simple_loss=0.3326, pruned_loss=6.682, over 7289.00 frames.], tot_loss[loss=0.1663, simple_loss=0.3326, pruned_loss=6.682, over 7289.00 frames.], batch size: 17, lr: 2.83e-03 +2022-05-27 14:24:09,643 INFO [train.py:823] (2/4) Epoch 3, batch 50, loss[loss=0.2117, simple_loss=0.4234, pruned_loss=6.735, over 5017.00 frames.], tot_loss[loss=0.1888, simple_loss=0.3776, pruned_loss=6.75, over 319459.35 frames.], batch size: 48, lr: 2.82e-03 +2022-05-27 14:24:48,971 INFO [train.py:823] (2/4) Epoch 3, batch 100, loss[loss=0.2141, simple_loss=0.4282, pruned_loss=6.96, over 6955.00 frames.], tot_loss[loss=0.1884, simple_loss=0.3768, pruned_loss=6.756, over 565448.70 frames.], batch size: 26, lr: 2.82e-03 +2022-05-27 14:25:28,672 INFO [train.py:823] (2/4) Epoch 3, batch 150, loss[loss=0.188, simple_loss=0.376, pruned_loss=6.778, over 7381.00 frames.], tot_loss[loss=0.1886, simple_loss=0.3771, pruned_loss=6.767, over 756278.28 frames.], batch size: 20, lr: 2.81e-03 +2022-05-27 14:26:08,171 INFO [train.py:823] (2/4) Epoch 3, batch 200, loss[loss=0.1742, simple_loss=0.3485, pruned_loss=6.743, over 7114.00 frames.], tot_loss[loss=0.1878, simple_loss=0.3756, pruned_loss=6.765, over 907447.52 frames.], batch size: 20, lr: 2.81e-03 +2022-05-27 14:26:47,675 INFO [train.py:823] (2/4) Epoch 3, batch 250, loss[loss=0.1923, simple_loss=0.3847, pruned_loss=6.799, over 6990.00 frames.], tot_loss[loss=0.1877, simple_loss=0.3753, pruned_loss=6.774, over 1025013.50 frames.], batch size: 26, lr: 2.80e-03 +2022-05-27 14:27:27,037 INFO [train.py:823] (2/4) Epoch 3, batch 300, loss[loss=0.168, simple_loss=0.336, pruned_loss=6.729, over 7381.00 frames.], tot_loss[loss=0.187, simple_loss=0.3739, pruned_loss=6.78, over 1115420.00 frames.], batch size: 19, lr: 2.80e-03 +2022-05-27 14:28:06,762 INFO [train.py:823] (2/4) Epoch 3, batch 350, loss[loss=0.2167, simple_loss=0.4334, pruned_loss=6.902, over 7327.00 frames.], tot_loss[loss=0.1864, simple_loss=0.3728, pruned_loss=6.786, over 1187385.33 frames.], batch size: 23, lr: 2.79e-03 +2022-05-27 14:28:45,743 INFO [train.py:823] (2/4) Epoch 3, batch 400, loss[loss=0.1729, simple_loss=0.3459, pruned_loss=6.677, over 7295.00 frames.], tot_loss[loss=0.1868, simple_loss=0.3735, pruned_loss=6.789, over 1240171.60 frames.], batch size: 18, lr: 2.79e-03 +2022-05-27 14:29:24,495 INFO [train.py:823] (2/4) Epoch 3, batch 450, loss[loss=0.1753, simple_loss=0.3506, pruned_loss=6.706, over 7197.00 frames.], tot_loss[loss=0.1867, simple_loss=0.3734, pruned_loss=6.792, over 1273494.93 frames.], batch size: 18, lr: 2.78e-03 +2022-05-27 14:30:03,765 INFO [train.py:823] (2/4) Epoch 3, batch 500, loss[loss=0.1724, simple_loss=0.3449, pruned_loss=6.693, over 7295.00 frames.], tot_loss[loss=0.1857, simple_loss=0.3715, pruned_loss=6.793, over 1305942.26 frames.], batch size: 18, lr: 2.77e-03 +2022-05-27 14:30:42,957 INFO [train.py:823] (2/4) Epoch 3, batch 550, loss[loss=0.2103, simple_loss=0.4207, pruned_loss=6.849, over 7184.00 frames.], tot_loss[loss=0.1856, simple_loss=0.3712, pruned_loss=6.796, over 1334307.28 frames.], batch size: 21, lr: 2.77e-03 +2022-05-27 14:31:21,904 INFO [train.py:823] (2/4) Epoch 3, batch 600, loss[loss=0.1885, simple_loss=0.3769, pruned_loss=6.804, over 7378.00 frames.], tot_loss[loss=0.1846, simple_loss=0.3693, pruned_loss=6.785, over 1346996.98 frames.], batch size: 
20, lr: 2.76e-03 +2022-05-27 14:32:01,057 INFO [train.py:823] (2/4) Epoch 3, batch 650, loss[loss=0.1897, simple_loss=0.3794, pruned_loss=6.83, over 4898.00 frames.], tot_loss[loss=0.1845, simple_loss=0.369, pruned_loss=6.79, over 1364152.75 frames.], batch size: 46, lr: 2.76e-03 +2022-05-27 14:32:40,590 INFO [train.py:823] (2/4) Epoch 3, batch 700, loss[loss=0.1896, simple_loss=0.3792, pruned_loss=6.865, over 7312.00 frames.], tot_loss[loss=0.1844, simple_loss=0.3688, pruned_loss=6.793, over 1376208.78 frames.], batch size: 22, lr: 2.75e-03 +2022-05-27 14:33:19,841 INFO [train.py:823] (2/4) Epoch 3, batch 750, loss[loss=0.1754, simple_loss=0.3508, pruned_loss=6.768, over 7215.00 frames.], tot_loss[loss=0.1835, simple_loss=0.3671, pruned_loss=6.794, over 1384169.80 frames.], batch size: 19, lr: 2.75e-03 +2022-05-27 14:33:58,512 INFO [train.py:823] (2/4) Epoch 3, batch 800, loss[loss=0.1854, simple_loss=0.3709, pruned_loss=6.858, over 7415.00 frames.], tot_loss[loss=0.1836, simple_loss=0.3673, pruned_loss=6.794, over 1394547.08 frames.], batch size: 22, lr: 2.74e-03 +2022-05-27 14:34:38,085 INFO [train.py:823] (2/4) Epoch 3, batch 850, loss[loss=0.1608, simple_loss=0.3217, pruned_loss=6.648, over 7448.00 frames.], tot_loss[loss=0.1833, simple_loss=0.3666, pruned_loss=6.796, over 1397461.82 frames.], batch size: 20, lr: 2.74e-03 +2022-05-27 14:35:16,894 INFO [train.py:823] (2/4) Epoch 3, batch 900, loss[loss=0.184, simple_loss=0.368, pruned_loss=6.822, over 4918.00 frames.], tot_loss[loss=0.1829, simple_loss=0.3657, pruned_loss=6.799, over 1393180.00 frames.], batch size: 47, lr: 2.73e-03 +2022-05-27 14:36:10,850 INFO [train.py:823] (2/4) Epoch 4, batch 0, loss[loss=0.1746, simple_loss=0.3493, pruned_loss=6.783, over 7112.00 frames.], tot_loss[loss=0.1746, simple_loss=0.3493, pruned_loss=6.783, over 7112.00 frames.], batch size: 19, lr: 2.64e-03 +2022-05-27 14:36:49,927 INFO [train.py:823] (2/4) Epoch 4, batch 50, loss[loss=0.1604, simple_loss=0.3208, pruned_loss=6.726, over 7016.00 frames.], tot_loss[loss=0.1721, simple_loss=0.3442, pruned_loss=6.787, over 319918.16 frames.], batch size: 17, lr: 2.64e-03 +2022-05-27 14:37:30,256 INFO [train.py:823] (2/4) Epoch 4, batch 100, loss[loss=0.181, simple_loss=0.362, pruned_loss=6.841, over 7372.00 frames.], tot_loss[loss=0.1743, simple_loss=0.3485, pruned_loss=6.8, over 564640.56 frames.], batch size: 21, lr: 2.63e-03 +2022-05-27 14:38:10,812 INFO [train.py:823] (2/4) Epoch 4, batch 150, loss[loss=0.1518, simple_loss=0.3036, pruned_loss=6.732, over 7171.00 frames.], tot_loss[loss=0.1751, simple_loss=0.3503, pruned_loss=6.803, over 750406.18 frames.], batch size: 17, lr: 2.63e-03 +2022-05-27 14:38:51,395 INFO [train.py:823] (2/4) Epoch 4, batch 200, loss[loss=0.279, simple_loss=0.3532, pruned_loss=1.024, over 7201.00 frames.], tot_loss[loss=0.2739, simple_loss=0.3658, pruned_loss=4.928, over 902324.68 frames.], batch size: 18, lr: 2.62e-03 +2022-05-27 14:39:31,803 INFO [train.py:823] (2/4) Epoch 4, batch 250, loss[loss=0.2393, simple_loss=0.3415, pruned_loss=0.6857, over 7367.00 frames.], tot_loss[loss=0.2705, simple_loss=0.3612, pruned_loss=3.662, over 1020953.51 frames.], batch size: 21, lr: 2.62e-03 +2022-05-27 14:40:11,110 INFO [train.py:823] (2/4) Epoch 4, batch 300, loss[loss=0.2205, simple_loss=0.3501, pruned_loss=0.4548, over 7193.00 frames.], tot_loss[loss=0.261, simple_loss=0.3603, pruned_loss=2.793, over 1106613.13 frames.], batch size: 20, lr: 2.61e-03 +2022-05-27 14:40:49,914 INFO [train.py:823] (2/4) Epoch 4, batch 350, 
loss[loss=0.2497, simple_loss=0.4204, pruned_loss=0.3948, over 7174.00 frames.], tot_loss[loss=0.2494, simple_loss=0.3594, pruned_loss=2.155, over 1172185.32 frames.], batch size: 23, lr: 2.60e-03 +2022-05-27 14:41:29,743 INFO [train.py:823] (2/4) Epoch 4, batch 400, loss[loss=0.2082, simple_loss=0.3671, pruned_loss=0.2464, over 7171.00 frames.], tot_loss[loss=0.2372, simple_loss=0.3564, pruned_loss=1.676, over 1225044.78 frames.], batch size: 25, lr: 2.60e-03 +2022-05-27 14:42:08,248 INFO [train.py:823] (2/4) Epoch 4, batch 450, loss[loss=0.1788, simple_loss=0.3173, pruned_loss=0.2012, over 7156.00 frames.], tot_loss[loss=0.2273, simple_loss=0.3544, pruned_loss=1.318, over 1267929.38 frames.], batch size: 17, lr: 2.59e-03 +2022-05-27 14:42:47,523 INFO [train.py:823] (2/4) Epoch 4, batch 500, loss[loss=0.2093, simple_loss=0.3762, pruned_loss=0.2123, over 7192.00 frames.], tot_loss[loss=0.2209, simple_loss=0.3552, pruned_loss=1.051, over 1304391.37 frames.], batch size: 25, lr: 2.59e-03 +2022-05-27 14:43:26,663 INFO [train.py:823] (2/4) Epoch 4, batch 550, loss[loss=0.1909, simple_loss=0.3457, pruned_loss=0.1809, over 7393.00 frames.], tot_loss[loss=0.2139, simple_loss=0.3527, pruned_loss=0.8465, over 1331861.48 frames.], batch size: 19, lr: 2.58e-03 +2022-05-27 14:44:06,032 INFO [train.py:823] (2/4) Epoch 4, batch 600, loss[loss=0.1829, simple_loss=0.336, pruned_loss=0.1492, over 7188.00 frames.], tot_loss[loss=0.2089, simple_loss=0.3516, pruned_loss=0.6914, over 1354370.38 frames.], batch size: 21, lr: 2.57e-03 +2022-05-27 14:44:44,742 INFO [train.py:823] (2/4) Epoch 4, batch 650, loss[loss=0.2027, simple_loss=0.3695, pruned_loss=0.1793, over 7366.00 frames.], tot_loss[loss=0.2044, simple_loss=0.3499, pruned_loss=0.5719, over 1370701.32 frames.], batch size: 20, lr: 2.57e-03 +2022-05-27 14:45:23,925 INFO [train.py:823] (2/4) Epoch 4, batch 700, loss[loss=0.2015, simple_loss=0.3638, pruned_loss=0.1958, over 4679.00 frames.], tot_loss[loss=0.2031, simple_loss=0.3521, pruned_loss=0.4852, over 1376345.87 frames.], batch size: 47, lr: 2.56e-03 +2022-05-27 14:46:02,632 INFO [train.py:823] (2/4) Epoch 4, batch 750, loss[loss=0.1816, simple_loss=0.3321, pruned_loss=0.1548, over 7100.00 frames.], tot_loss[loss=0.1998, simple_loss=0.3502, pruned_loss=0.4128, over 1384779.64 frames.], batch size: 19, lr: 2.56e-03 +2022-05-27 14:46:42,013 INFO [train.py:823] (2/4) Epoch 4, batch 800, loss[loss=0.168, simple_loss=0.3086, pruned_loss=0.1368, over 7026.00 frames.], tot_loss[loss=0.197, simple_loss=0.3485, pruned_loss=0.3573, over 1386654.31 frames.], batch size: 17, lr: 2.55e-03 +2022-05-27 14:47:21,054 INFO [train.py:823] (2/4) Epoch 4, batch 850, loss[loss=0.1795, simple_loss=0.3323, pruned_loss=0.1339, over 7308.00 frames.], tot_loss[loss=0.1946, simple_loss=0.3469, pruned_loss=0.3121, over 1392688.76 frames.], batch size: 22, lr: 2.54e-03 +2022-05-27 14:47:59,991 INFO [train.py:823] (2/4) Epoch 4, batch 900, loss[loss=0.1669, simple_loss=0.3107, pruned_loss=0.1151, over 7196.00 frames.], tot_loss[loss=0.1929, simple_loss=0.3458, pruned_loss=0.2777, over 1389173.62 frames.], batch size: 18, lr: 2.54e-03 +2022-05-27 14:48:51,281 INFO [train.py:823] (2/4) Epoch 5, batch 0, loss[loss=0.1873, simple_loss=0.3428, pruned_loss=0.1591, over 7347.00 frames.], tot_loss[loss=0.1873, simple_loss=0.3428, pruned_loss=0.1591, over 7347.00 frames.], batch size: 23, lr: 2.44e-03 +2022-05-27 14:49:30,543 INFO [train.py:823] (2/4) Epoch 5, batch 50, loss[loss=0.1975, simple_loss=0.3635, pruned_loss=0.1576, over 6981.00 
frames.], tot_loss[loss=0.184, simple_loss=0.3384, pruned_loss=0.1473, over 325777.98 frames.], batch size: 26, lr: 2.44e-03 +2022-05-27 14:50:10,165 INFO [train.py:823] (2/4) Epoch 5, batch 100, loss[loss=0.194, simple_loss=0.356, pruned_loss=0.1604, over 7107.00 frames.], tot_loss[loss=0.1821, simple_loss=0.3357, pruned_loss=0.1421, over 570770.75 frames.], batch size: 20, lr: 2.43e-03 +2022-05-27 14:50:49,528 INFO [train.py:823] (2/4) Epoch 5, batch 150, loss[loss=0.1813, simple_loss=0.3326, pruned_loss=0.1497, over 7377.00 frames.], tot_loss[loss=0.1819, simple_loss=0.3352, pruned_loss=0.143, over 758418.80 frames.], batch size: 20, lr: 2.42e-03 +2022-05-27 14:51:28,466 INFO [train.py:823] (2/4) Epoch 5, batch 200, loss[loss=0.2047, simple_loss=0.3758, pruned_loss=0.1681, over 7182.00 frames.], tot_loss[loss=0.1825, simple_loss=0.3365, pruned_loss=0.1431, over 904979.58 frames.], batch size: 22, lr: 2.42e-03 +2022-05-27 14:52:07,854 INFO [train.py:823] (2/4) Epoch 5, batch 250, loss[loss=0.1945, simple_loss=0.3559, pruned_loss=0.1655, over 5195.00 frames.], tot_loss[loss=0.182, simple_loss=0.3354, pruned_loss=0.1424, over 1013869.42 frames.], batch size: 46, lr: 2.41e-03 +2022-05-27 14:52:46,744 INFO [train.py:823] (2/4) Epoch 5, batch 300, loss[loss=0.1887, simple_loss=0.3503, pruned_loss=0.1353, over 7159.00 frames.], tot_loss[loss=0.1817, simple_loss=0.3351, pruned_loss=0.1412, over 1105046.93 frames.], batch size: 23, lr: 2.41e-03 +2022-05-27 14:53:26,208 INFO [train.py:823] (2/4) Epoch 5, batch 350, loss[loss=0.1907, simple_loss=0.3519, pruned_loss=0.1473, over 7230.00 frames.], tot_loss[loss=0.1818, simple_loss=0.3355, pruned_loss=0.1402, over 1174667.34 frames.], batch size: 24, lr: 2.40e-03 +2022-05-27 14:54:05,607 INFO [train.py:823] (2/4) Epoch 5, batch 400, loss[loss=0.1585, simple_loss=0.2952, pruned_loss=0.109, over 7440.00 frames.], tot_loss[loss=0.1816, simple_loss=0.3353, pruned_loss=0.1392, over 1234218.16 frames.], batch size: 18, lr: 2.39e-03 +2022-05-27 14:54:45,166 INFO [train.py:823] (2/4) Epoch 5, batch 450, loss[loss=0.1957, simple_loss=0.3619, pruned_loss=0.1473, over 7049.00 frames.], tot_loss[loss=0.1801, simple_loss=0.3332, pruned_loss=0.1356, over 1269117.39 frames.], batch size: 26, lr: 2.39e-03 +2022-05-27 14:55:24,716 INFO [train.py:823] (2/4) Epoch 5, batch 500, loss[loss=0.1698, simple_loss=0.3179, pruned_loss=0.108, over 7198.00 frames.], tot_loss[loss=0.1799, simple_loss=0.333, pruned_loss=0.1338, over 1304777.07 frames.], batch size: 19, lr: 2.38e-03 +2022-05-27 14:56:03,753 INFO [train.py:823] (2/4) Epoch 5, batch 550, loss[loss=0.1796, simple_loss=0.3373, pruned_loss=0.1096, over 6920.00 frames.], tot_loss[loss=0.1801, simple_loss=0.3336, pruned_loss=0.1335, over 1330533.32 frames.], batch size: 29, lr: 2.38e-03 +2022-05-27 14:56:42,878 INFO [train.py:823] (2/4) Epoch 5, batch 600, loss[loss=0.1837, simple_loss=0.3411, pruned_loss=0.131, over 6707.00 frames.], tot_loss[loss=0.1805, simple_loss=0.3341, pruned_loss=0.1343, over 1348757.80 frames.], batch size: 34, lr: 2.37e-03 +2022-05-27 14:57:22,168 INFO [train.py:823] (2/4) Epoch 5, batch 650, loss[loss=0.1691, simple_loss=0.3182, pruned_loss=0.1004, over 7283.00 frames.], tot_loss[loss=0.1796, simple_loss=0.3328, pruned_loss=0.1321, over 1364094.35 frames.], batch size: 21, lr: 2.37e-03 +2022-05-27 14:58:00,867 INFO [train.py:823] (2/4) Epoch 5, batch 700, loss[loss=0.1785, simple_loss=0.3324, pruned_loss=0.1228, over 6978.00 frames.], tot_loss[loss=0.1792, simple_loss=0.3322, 
pruned_loss=0.1311, over 1373657.59 frames.], batch size: 26, lr: 2.36e-03 +2022-05-27 14:58:39,876 INFO [train.py:823] (2/4) Epoch 5, batch 750, loss[loss=0.1898, simple_loss=0.3534, pruned_loss=0.1312, over 7162.00 frames.], tot_loss[loss=0.1792, simple_loss=0.3325, pruned_loss=0.1299, over 1381397.83 frames.], batch size: 23, lr: 2.35e-03 +2022-05-27 14:59:18,695 INFO [train.py:823] (2/4) Epoch 5, batch 800, loss[loss=0.2121, simple_loss=0.3884, pruned_loss=0.1787, over 4933.00 frames.], tot_loss[loss=0.1788, simple_loss=0.3317, pruned_loss=0.1296, over 1391605.94 frames.], batch size: 47, lr: 2.35e-03 +2022-05-27 14:59:59,089 INFO [train.py:823] (2/4) Epoch 5, batch 850, loss[loss=0.168, simple_loss=0.3123, pruned_loss=0.1187, over 7150.00 frames.], tot_loss[loss=0.1786, simple_loss=0.3315, pruned_loss=0.1287, over 1398602.69 frames.], batch size: 17, lr: 2.34e-03 +2022-05-27 15:00:37,922 INFO [train.py:823] (2/4) Epoch 5, batch 900, loss[loss=0.1941, simple_loss=0.3603, pruned_loss=0.1393, over 6875.00 frames.], tot_loss[loss=0.1788, simple_loss=0.3319, pruned_loss=0.1283, over 1399814.47 frames.], batch size: 29, lr: 2.34e-03 +2022-05-27 15:01:33,801 INFO [train.py:823] (2/4) Epoch 6, batch 0, loss[loss=0.1955, simple_loss=0.3618, pruned_loss=0.1465, over 7169.00 frames.], tot_loss[loss=0.1955, simple_loss=0.3618, pruned_loss=0.1465, over 7169.00 frames.], batch size: 22, lr: 2.24e-03 +2022-05-27 15:02:12,506 INFO [train.py:823] (2/4) Epoch 6, batch 50, loss[loss=0.177, simple_loss=0.3308, pruned_loss=0.1162, over 7182.00 frames.], tot_loss[loss=0.1749, simple_loss=0.326, pruned_loss=0.1193, over 319668.76 frames.], batch size: 21, lr: 2.23e-03 +2022-05-27 15:02:52,427 INFO [train.py:823] (2/4) Epoch 6, batch 100, loss[loss=0.1757, simple_loss=0.3269, pruned_loss=0.1223, over 7242.00 frames.], tot_loss[loss=0.1706, simple_loss=0.3184, pruned_loss=0.1135, over 566815.89 frames.], batch size: 24, lr: 2.23e-03 +2022-05-27 15:03:32,869 INFO [train.py:823] (2/4) Epoch 6, batch 150, loss[loss=0.1863, simple_loss=0.3465, pruned_loss=0.1309, over 7291.00 frames.], tot_loss[loss=0.1733, simple_loss=0.3233, pruned_loss=0.1165, over 756522.81 frames.], batch size: 19, lr: 2.22e-03 +2022-05-27 15:04:12,228 INFO [train.py:823] (2/4) Epoch 6, batch 200, loss[loss=0.197, simple_loss=0.3607, pruned_loss=0.166, over 7216.00 frames.], tot_loss[loss=0.1732, simple_loss=0.3232, pruned_loss=0.1165, over 902146.81 frames.], batch size: 25, lr: 2.22e-03 +2022-05-27 15:04:50,834 INFO [train.py:823] (2/4) Epoch 6, batch 250, loss[loss=0.175, simple_loss=0.3271, pruned_loss=0.115, over 6592.00 frames.], tot_loss[loss=0.1737, simple_loss=0.3242, pruned_loss=0.1161, over 1018227.17 frames.], batch size: 34, lr: 2.21e-03 +2022-05-27 15:05:29,770 INFO [train.py:823] (2/4) Epoch 6, batch 300, loss[loss=0.1715, simple_loss=0.3207, pruned_loss=0.1121, over 7192.00 frames.], tot_loss[loss=0.1735, simple_loss=0.3239, pruned_loss=0.1152, over 1108157.38 frames.], batch size: 20, lr: 2.21e-03 +2022-05-27 15:06:08,841 INFO [train.py:823] (2/4) Epoch 6, batch 350, loss[loss=0.1593, simple_loss=0.2975, pruned_loss=0.1055, over 7090.00 frames.], tot_loss[loss=0.1729, simple_loss=0.323, pruned_loss=0.1143, over 1179998.29 frames.], batch size: 18, lr: 2.20e-03 +2022-05-27 15:06:48,061 INFO [train.py:823] (2/4) Epoch 6, batch 400, loss[loss=0.1786, simple_loss=0.3314, pruned_loss=0.1289, over 7183.00 frames.], tot_loss[loss=0.172, simple_loss=0.3214, pruned_loss=0.113, over 1235396.85 frames.], batch size: 22, lr: 2.19e-03 
+2022-05-27 15:07:26,433 INFO [train.py:823] (2/4) Epoch 6, batch 450, loss[loss=0.1631, simple_loss=0.3059, pruned_loss=0.1014, over 6354.00 frames.], tot_loss[loss=0.172, simple_loss=0.3215, pruned_loss=0.1124, over 1268187.94 frames.], batch size: 34, lr: 2.19e-03 +2022-05-27 15:08:05,568 INFO [train.py:823] (2/4) Epoch 6, batch 500, loss[loss=0.1816, simple_loss=0.3361, pruned_loss=0.136, over 7149.00 frames.], tot_loss[loss=0.1732, simple_loss=0.3237, pruned_loss=0.1138, over 1298598.74 frames.], batch size: 23, lr: 2.18e-03 +2022-05-27 15:08:44,678 INFO [train.py:823] (2/4) Epoch 6, batch 550, loss[loss=0.1506, simple_loss=0.2853, pruned_loss=0.07928, over 7096.00 frames.], tot_loss[loss=0.1731, simple_loss=0.3234, pruned_loss=0.1136, over 1325623.31 frames.], batch size: 18, lr: 2.18e-03 +2022-05-27 15:09:24,203 INFO [train.py:823] (2/4) Epoch 6, batch 600, loss[loss=0.1505, simple_loss=0.2835, pruned_loss=0.08775, over 7102.00 frames.], tot_loss[loss=0.1725, simple_loss=0.3224, pruned_loss=0.1135, over 1343726.79 frames.], batch size: 18, lr: 2.17e-03 +2022-05-27 15:10:02,604 INFO [train.py:823] (2/4) Epoch 6, batch 650, loss[loss=0.1464, simple_loss=0.2794, pruned_loss=0.06645, over 7390.00 frames.], tot_loss[loss=0.1713, simple_loss=0.3203, pruned_loss=0.1116, over 1360163.13 frames.], batch size: 19, lr: 2.17e-03 +2022-05-27 15:10:41,902 INFO [train.py:823] (2/4) Epoch 6, batch 700, loss[loss=0.1556, simple_loss=0.295, pruned_loss=0.0814, over 7201.00 frames.], tot_loss[loss=0.171, simple_loss=0.32, pruned_loss=0.1103, over 1375225.59 frames.], batch size: 19, lr: 2.16e-03 +2022-05-27 15:11:21,002 INFO [train.py:823] (2/4) Epoch 6, batch 750, loss[loss=0.1531, simple_loss=0.2886, pruned_loss=0.08754, over 7091.00 frames.], tot_loss[loss=0.1712, simple_loss=0.3204, pruned_loss=0.1104, over 1382504.40 frames.], batch size: 19, lr: 2.16e-03 +2022-05-27 15:12:00,684 INFO [train.py:823] (2/4) Epoch 6, batch 800, loss[loss=0.1592, simple_loss=0.2962, pruned_loss=0.1117, over 7023.00 frames.], tot_loss[loss=0.1708, simple_loss=0.3196, pruned_loss=0.1099, over 1388145.10 frames.], batch size: 16, lr: 2.15e-03 +2022-05-27 15:12:39,779 INFO [train.py:823] (2/4) Epoch 6, batch 850, loss[loss=0.1596, simple_loss=0.2982, pruned_loss=0.1049, over 6782.00 frames.], tot_loss[loss=0.1708, simple_loss=0.3197, pruned_loss=0.1101, over 1391565.28 frames.], batch size: 15, lr: 2.15e-03 +2022-05-27 15:13:19,393 INFO [train.py:823] (2/4) Epoch 6, batch 900, loss[loss=0.1397, simple_loss=0.265, pruned_loss=0.0719, over 7291.00 frames.], tot_loss[loss=0.1705, simple_loss=0.319, pruned_loss=0.1097, over 1395855.95 frames.], batch size: 17, lr: 2.14e-03 +2022-05-27 15:14:12,812 INFO [train.py:823] (2/4) Epoch 7, batch 0, loss[loss=0.1575, simple_loss=0.2973, pruned_loss=0.08832, over 7105.00 frames.], tot_loss[loss=0.1575, simple_loss=0.2973, pruned_loss=0.08832, over 7105.00 frames.], batch size: 19, lr: 2.05e-03 +2022-05-27 15:14:52,618 INFO [train.py:823] (2/4) Epoch 7, batch 50, loss[loss=0.1737, simple_loss=0.3214, pruned_loss=0.1296, over 7257.00 frames.], tot_loss[loss=0.1644, simple_loss=0.3086, pruned_loss=0.1011, over 323121.85 frames.], batch size: 16, lr: 2.04e-03 +2022-05-27 15:15:31,823 INFO [train.py:823] (2/4) Epoch 7, batch 100, loss[loss=0.157, simple_loss=0.2968, pruned_loss=0.08571, over 7114.00 frames.], tot_loss[loss=0.1633, simple_loss=0.307, pruned_loss=0.09851, over 562513.12 frames.], batch size: 20, lr: 2.04e-03 +2022-05-27 15:16:10,883 INFO [train.py:823] (2/4) Epoch 7, batch 
150, loss[loss=0.1704, simple_loss=0.3193, pruned_loss=0.1071, over 7370.00 frames.], tot_loss[loss=0.1654, simple_loss=0.3109, pruned_loss=0.09954, over 753308.38 frames.], batch size: 21, lr: 2.03e-03 +2022-05-27 15:16:50,158 INFO [train.py:823] (2/4) Epoch 7, batch 200, loss[loss=0.1635, simple_loss=0.3086, pruned_loss=0.09175, over 7021.00 frames.], tot_loss[loss=0.1654, simple_loss=0.3108, pruned_loss=0.09986, over 904138.85 frames.], batch size: 26, lr: 2.03e-03 +2022-05-27 15:17:29,174 INFO [train.py:823] (2/4) Epoch 7, batch 250, loss[loss=0.1715, simple_loss=0.3211, pruned_loss=0.1093, over 7304.00 frames.], tot_loss[loss=0.1655, simple_loss=0.3109, pruned_loss=0.1005, over 1019578.68 frames.], batch size: 22, lr: 2.02e-03 +2022-05-27 15:18:07,893 INFO [train.py:823] (2/4) Epoch 7, batch 300, loss[loss=0.1402, simple_loss=0.2645, pruned_loss=0.07958, over 7155.00 frames.], tot_loss[loss=0.1663, simple_loss=0.3123, pruned_loss=0.1012, over 1108496.36 frames.], batch size: 17, lr: 2.02e-03 +2022-05-27 15:18:47,544 INFO [train.py:823] (2/4) Epoch 7, batch 350, loss[loss=0.2771, simple_loss=0.3245, pruned_loss=0.1148, over 7298.00 frames.], tot_loss[loss=0.1971, simple_loss=0.3156, pruned_loss=0.106, over 1176644.06 frames.], batch size: 19, lr: 2.01e-03 +2022-05-27 15:19:26,417 INFO [train.py:823] (2/4) Epoch 7, batch 400, loss[loss=0.2886, simple_loss=0.3432, pruned_loss=0.117, over 7338.00 frames.], tot_loss[loss=0.2156, simple_loss=0.3171, pruned_loss=0.1067, over 1231171.24 frames.], batch size: 23, lr: 2.01e-03 +2022-05-27 15:20:05,961 INFO [train.py:823] (2/4) Epoch 7, batch 450, loss[loss=0.2666, simple_loss=0.3253, pruned_loss=0.1039, over 7159.00 frames.], tot_loss[loss=0.2293, simple_loss=0.3186, pruned_loss=0.1076, over 1268736.64 frames.], batch size: 22, lr: 2.00e-03 +2022-05-27 15:20:45,040 INFO [train.py:823] (2/4) Epoch 7, batch 500, loss[loss=0.2844, simple_loss=0.3375, pruned_loss=0.1157, over 7000.00 frames.], tot_loss[loss=0.2349, simple_loss=0.3174, pruned_loss=0.1046, over 1302670.44 frames.], batch size: 26, lr: 2.00e-03 +2022-05-27 15:21:24,308 INFO [train.py:823] (2/4) Epoch 7, batch 550, loss[loss=0.261, simple_loss=0.32, pruned_loss=0.101, over 6615.00 frames.], tot_loss[loss=0.2389, simple_loss=0.3159, pruned_loss=0.1027, over 1326578.84 frames.], batch size: 34, lr: 1.99e-03 +2022-05-27 15:22:03,245 INFO [train.py:823] (2/4) Epoch 7, batch 600, loss[loss=0.3152, simple_loss=0.3614, pruned_loss=0.1344, over 7370.00 frames.], tot_loss[loss=0.244, simple_loss=0.3163, pruned_loss=0.1025, over 1344739.70 frames.], batch size: 21, lr: 1.99e-03 +2022-05-27 15:22:42,702 INFO [train.py:823] (2/4) Epoch 7, batch 650, loss[loss=0.2802, simple_loss=0.3411, pruned_loss=0.1096, over 7114.00 frames.], tot_loss[loss=0.2472, simple_loss=0.3162, pruned_loss=0.1019, over 1361989.13 frames.], batch size: 20, lr: 1.98e-03 +2022-05-27 15:23:24,649 INFO [train.py:823] (2/4) Epoch 7, batch 700, loss[loss=0.2296, simple_loss=0.3016, pruned_loss=0.07882, over 7090.00 frames.], tot_loss[loss=0.25, simple_loss=0.3165, pruned_loss=0.1017, over 1371569.08 frames.], batch size: 18, lr: 1.98e-03 +2022-05-27 15:24:03,839 INFO [train.py:823] (2/4) Epoch 7, batch 750, loss[loss=0.2405, simple_loss=0.3106, pruned_loss=0.08516, over 6974.00 frames.], tot_loss[loss=0.25, simple_loss=0.3152, pruned_loss=0.1001, over 1378844.82 frames.], batch size: 26, lr: 1.97e-03 +2022-05-27 15:24:43,860 INFO [train.py:823] (2/4) Epoch 7, batch 800, loss[loss=0.2849, simple_loss=0.3395, pruned_loss=0.1152, 
over 7195.00 frames.], tot_loss[loss=0.253, simple_loss=0.3169, pruned_loss=0.1005, over 1388951.86 frames.], batch size: 19, lr: 1.97e-03 +2022-05-27 15:25:23,418 INFO [train.py:823] (2/4) Epoch 7, batch 850, loss[loss=0.2795, simple_loss=0.34, pruned_loss=0.1095, over 7379.00 frames.], tot_loss[loss=0.2548, simple_loss=0.3181, pruned_loss=0.1003, over 1389762.04 frames.], batch size: 21, lr: 1.97e-03 +2022-05-27 15:26:02,043 INFO [train.py:823] (2/4) Epoch 7, batch 900, loss[loss=0.2667, simple_loss=0.3352, pruned_loss=0.09909, over 7031.00 frames.], tot_loss[loss=0.2555, simple_loss=0.3182, pruned_loss=0.09995, over 1391207.29 frames.], batch size: 29, lr: 1.96e-03 +2022-05-27 15:26:53,907 INFO [train.py:823] (2/4) Epoch 8, batch 0, loss[loss=0.2404, simple_loss=0.3032, pruned_loss=0.08877, over 7424.00 frames.], tot_loss[loss=0.2404, simple_loss=0.3032, pruned_loss=0.08877, over 7424.00 frames.], batch size: 22, lr: 1.88e-03 +2022-05-27 15:27:33,952 INFO [train.py:823] (2/4) Epoch 8, batch 50, loss[loss=0.2407, simple_loss=0.3105, pruned_loss=0.08543, over 7234.00 frames.], tot_loss[loss=0.2478, simple_loss=0.3126, pruned_loss=0.09154, over 320948.90 frames.], batch size: 24, lr: 1.87e-03 +2022-05-27 15:28:13,386 INFO [train.py:823] (2/4) Epoch 8, batch 100, loss[loss=0.1918, simple_loss=0.2573, pruned_loss=0.06317, over 7430.00 frames.], tot_loss[loss=0.2526, simple_loss=0.3162, pruned_loss=0.09451, over 565085.77 frames.], batch size: 18, lr: 1.87e-03 +2022-05-27 15:28:52,007 INFO [train.py:823] (2/4) Epoch 8, batch 150, loss[loss=0.2504, simple_loss=0.3163, pruned_loss=0.09221, over 7272.00 frames.], tot_loss[loss=0.2502, simple_loss=0.3131, pruned_loss=0.09366, over 753769.36 frames.], batch size: 20, lr: 1.86e-03 +2022-05-27 15:29:31,450 INFO [train.py:823] (2/4) Epoch 8, batch 200, loss[loss=0.2202, simple_loss=0.2732, pruned_loss=0.08366, over 6997.00 frames.], tot_loss[loss=0.2482, simple_loss=0.3118, pruned_loss=0.09231, over 898683.11 frames.], batch size: 16, lr: 1.86e-03 +2022-05-27 15:30:10,535 INFO [train.py:823] (2/4) Epoch 8, batch 250, loss[loss=0.2411, simple_loss=0.3078, pruned_loss=0.08724, over 7144.00 frames.], tot_loss[loss=0.2469, simple_loss=0.3107, pruned_loss=0.09154, over 1012843.27 frames.], batch size: 23, lr: 1.85e-03 +2022-05-27 15:30:49,898 INFO [train.py:823] (2/4) Epoch 8, batch 300, loss[loss=0.2213, simple_loss=0.2879, pruned_loss=0.07732, over 7394.00 frames.], tot_loss[loss=0.2452, simple_loss=0.31, pruned_loss=0.09024, over 1106064.18 frames.], batch size: 19, lr: 1.85e-03 +2022-05-27 15:31:28,783 INFO [train.py:823] (2/4) Epoch 8, batch 350, loss[loss=0.2177, simple_loss=0.2826, pruned_loss=0.07643, over 7037.00 frames.], tot_loss[loss=0.2447, simple_loss=0.3098, pruned_loss=0.08981, over 1165882.75 frames.], batch size: 16, lr: 1.85e-03 +2022-05-27 15:32:08,184 INFO [train.py:823] (2/4) Epoch 8, batch 400, loss[loss=0.2851, simple_loss=0.3399, pruned_loss=0.1151, over 7134.00 frames.], tot_loss[loss=0.2457, simple_loss=0.3113, pruned_loss=0.09007, over 1220963.65 frames.], batch size: 22, lr: 1.84e-03 +2022-05-27 15:32:47,368 INFO [train.py:823] (2/4) Epoch 8, batch 450, loss[loss=0.2609, simple_loss=0.326, pruned_loss=0.09797, over 6564.00 frames.], tot_loss[loss=0.2468, simple_loss=0.3124, pruned_loss=0.09054, over 1264634.54 frames.], batch size: 34, lr: 1.84e-03 +2022-05-27 15:33:26,810 INFO [train.py:823] (2/4) Epoch 8, batch 500, loss[loss=0.2181, simple_loss=0.2824, pruned_loss=0.07688, over 7297.00 frames.], tot_loss[loss=0.2452, 
simple_loss=0.3114, pruned_loss=0.08948, over 1301526.42 frames.], batch size: 17, lr: 1.83e-03 +2022-05-27 15:34:05,408 INFO [train.py:823] (2/4) Epoch 8, batch 550, loss[loss=0.2897, simple_loss=0.3394, pruned_loss=0.12, over 7165.00 frames.], tot_loss[loss=0.2466, simple_loss=0.313, pruned_loss=0.09015, over 1325931.04 frames.], batch size: 22, lr: 1.83e-03 +2022-05-27 15:34:44,873 INFO [train.py:823] (2/4) Epoch 8, batch 600, loss[loss=0.2074, simple_loss=0.2823, pruned_loss=0.0663, over 7030.00 frames.], tot_loss[loss=0.2473, simple_loss=0.3133, pruned_loss=0.09061, over 1344354.35 frames.], batch size: 17, lr: 1.82e-03 +2022-05-27 15:35:24,023 INFO [train.py:823] (2/4) Epoch 8, batch 650, loss[loss=0.2423, simple_loss=0.3227, pruned_loss=0.08095, over 6960.00 frames.], tot_loss[loss=0.2471, simple_loss=0.3135, pruned_loss=0.09038, over 1362258.09 frames.], batch size: 26, lr: 1.82e-03 +2022-05-27 15:36:03,208 INFO [train.py:823] (2/4) Epoch 8, batch 700, loss[loss=0.2131, simple_loss=0.2822, pruned_loss=0.072, over 7291.00 frames.], tot_loss[loss=0.2453, simple_loss=0.3125, pruned_loss=0.08908, over 1379898.94 frames.], batch size: 19, lr: 1.82e-03 +2022-05-27 15:36:42,109 INFO [train.py:823] (2/4) Epoch 8, batch 750, loss[loss=0.2374, simple_loss=0.2996, pruned_loss=0.08758, over 7097.00 frames.], tot_loss[loss=0.2446, simple_loss=0.3121, pruned_loss=0.08857, over 1387250.88 frames.], batch size: 18, lr: 1.81e-03 +2022-05-27 15:37:21,571 INFO [train.py:823] (2/4) Epoch 8, batch 800, loss[loss=0.2639, simple_loss=0.3309, pruned_loss=0.09847, over 5368.00 frames.], tot_loss[loss=0.2436, simple_loss=0.3113, pruned_loss=0.08795, over 1388112.29 frames.], batch size: 46, lr: 1.81e-03 +2022-05-27 15:38:00,635 INFO [train.py:823] (2/4) Epoch 8, batch 850, loss[loss=0.2519, simple_loss=0.3259, pruned_loss=0.08893, over 7194.00 frames.], tot_loss[loss=0.2422, simple_loss=0.31, pruned_loss=0.08725, over 1390668.10 frames.], batch size: 20, lr: 1.80e-03 +2022-05-27 15:38:39,702 INFO [train.py:823] (2/4) Epoch 8, batch 900, loss[loss=0.2299, simple_loss=0.2965, pruned_loss=0.08165, over 7099.00 frames.], tot_loss[loss=0.2424, simple_loss=0.3102, pruned_loss=0.08729, over 1394773.75 frames.], batch size: 18, lr: 1.80e-03 +2022-05-27 15:39:31,059 INFO [train.py:823] (2/4) Epoch 9, batch 0, loss[loss=0.2626, simple_loss=0.3415, pruned_loss=0.09181, over 7187.00 frames.], tot_loss[loss=0.2626, simple_loss=0.3415, pruned_loss=0.09181, over 7187.00 frames.], batch size: 21, lr: 1.72e-03 +2022-05-27 15:40:10,165 INFO [train.py:823] (2/4) Epoch 9, batch 50, loss[loss=0.193, simple_loss=0.2729, pruned_loss=0.05661, over 7392.00 frames.], tot_loss[loss=0.2361, simple_loss=0.3065, pruned_loss=0.08289, over 319610.06 frames.], batch size: 19, lr: 1.72e-03 +2022-05-27 15:40:49,210 INFO [train.py:823] (2/4) Epoch 9, batch 100, loss[loss=0.2099, simple_loss=0.2794, pruned_loss=0.07027, over 7293.00 frames.], tot_loss[loss=0.2342, simple_loss=0.3054, pruned_loss=0.08148, over 562899.83 frames.], batch size: 19, lr: 1.71e-03 +2022-05-27 15:41:28,205 INFO [train.py:823] (2/4) Epoch 9, batch 150, loss[loss=0.2239, simple_loss=0.3049, pruned_loss=0.07149, over 7100.00 frames.], tot_loss[loss=0.2359, simple_loss=0.3069, pruned_loss=0.08245, over 752579.04 frames.], batch size: 19, lr: 1.71e-03 +2022-05-27 15:42:06,986 INFO [train.py:823] (2/4) Epoch 9, batch 200, loss[loss=0.2451, simple_loss=0.3218, pruned_loss=0.08416, over 7282.00 frames.], tot_loss[loss=0.237, simple_loss=0.308, pruned_loss=0.08301, over 
896160.55 frames.], batch size: 20, lr: 1.71e-03 +2022-05-27 15:42:46,311 INFO [train.py:823] (2/4) Epoch 9, batch 250, loss[loss=0.2078, simple_loss=0.2959, pruned_loss=0.05986, over 7193.00 frames.], tot_loss[loss=0.2347, simple_loss=0.3064, pruned_loss=0.0815, over 1011967.83 frames.], batch size: 20, lr: 1.70e-03 +2022-05-27 15:43:24,861 INFO [train.py:823] (2/4) Epoch 9, batch 300, loss[loss=0.2097, simple_loss=0.2869, pruned_loss=0.06623, over 7189.00 frames.], tot_loss[loss=0.2332, simple_loss=0.3052, pruned_loss=0.08058, over 1103271.84 frames.], batch size: 18, lr: 1.70e-03 +2022-05-27 15:44:04,324 INFO [train.py:823] (2/4) Epoch 9, batch 350, loss[loss=0.1934, simple_loss=0.2596, pruned_loss=0.06358, over 7288.00 frames.], tot_loss[loss=0.231, simple_loss=0.3036, pruned_loss=0.07921, over 1173047.07 frames.], batch size: 17, lr: 1.70e-03 +2022-05-27 15:44:43,705 INFO [train.py:823] (2/4) Epoch 9, batch 400, loss[loss=0.2116, simple_loss=0.3012, pruned_loss=0.06106, over 7307.00 frames.], tot_loss[loss=0.2301, simple_loss=0.3024, pruned_loss=0.07888, over 1228981.97 frames.], batch size: 22, lr: 1.69e-03 +2022-05-27 15:45:28,939 INFO [train.py:823] (2/4) Epoch 9, batch 450, loss[loss=0.2174, simple_loss=0.2991, pruned_loss=0.06782, over 7187.00 frames.], tot_loss[loss=0.2334, simple_loss=0.3054, pruned_loss=0.08068, over 1270122.84 frames.], batch size: 19, lr: 1.69e-03 +2022-05-27 15:46:08,970 INFO [train.py:823] (2/4) Epoch 9, batch 500, loss[loss=0.2377, simple_loss=0.3051, pruned_loss=0.08518, over 7241.00 frames.], tot_loss[loss=0.2347, simple_loss=0.3062, pruned_loss=0.08155, over 1304026.77 frames.], batch size: 24, lr: 1.68e-03 +2022-05-27 15:46:48,249 INFO [train.py:823] (2/4) Epoch 9, batch 550, loss[loss=0.2414, simple_loss=0.3082, pruned_loss=0.08728, over 7187.00 frames.], tot_loss[loss=0.2356, simple_loss=0.3068, pruned_loss=0.08217, over 1333424.16 frames.], batch size: 19, lr: 1.68e-03 +2022-05-27 15:47:28,246 INFO [train.py:823] (2/4) Epoch 9, batch 600, loss[loss=0.2233, simple_loss=0.291, pruned_loss=0.07777, over 7151.00 frames.], tot_loss[loss=0.2353, simple_loss=0.3064, pruned_loss=0.08205, over 1354622.41 frames.], batch size: 17, lr: 1.68e-03 +2022-05-27 15:48:07,786 INFO [train.py:823] (2/4) Epoch 9, batch 650, loss[loss=0.2662, simple_loss=0.3301, pruned_loss=0.1011, over 6942.00 frames.], tot_loss[loss=0.2326, simple_loss=0.3047, pruned_loss=0.08029, over 1368316.57 frames.], batch size: 29, lr: 1.67e-03 +2022-05-27 15:48:46,358 INFO [train.py:823] (2/4) Epoch 9, batch 700, loss[loss=0.2264, simple_loss=0.306, pruned_loss=0.07343, over 7292.00 frames.], tot_loss[loss=0.233, simple_loss=0.3049, pruned_loss=0.08052, over 1376692.58 frames.], batch size: 22, lr: 1.67e-03 +2022-05-27 15:49:25,433 INFO [train.py:823] (2/4) Epoch 9, batch 750, loss[loss=0.2173, simple_loss=0.2889, pruned_loss=0.07289, over 7197.00 frames.], tot_loss[loss=0.2341, simple_loss=0.306, pruned_loss=0.08112, over 1387275.32 frames.], batch size: 18, lr: 1.67e-03 +2022-05-27 15:50:03,860 INFO [train.py:823] (2/4) Epoch 9, batch 800, loss[loss=0.2398, simple_loss=0.3128, pruned_loss=0.08345, over 7097.00 frames.], tot_loss[loss=0.2347, simple_loss=0.3069, pruned_loss=0.08123, over 1388666.32 frames.], batch size: 19, lr: 1.66e-03 +2022-05-27 15:50:43,426 INFO [train.py:823] (2/4) Epoch 9, batch 850, loss[loss=0.2018, simple_loss=0.2797, pruned_loss=0.06198, over 6816.00 frames.], tot_loss[loss=0.234, simple_loss=0.3064, pruned_loss=0.08082, over 1397585.60 frames.], batch size: 15, lr: 
1.66e-03 +2022-05-27 15:51:23,348 INFO [train.py:823] (2/4) Epoch 9, batch 900, loss[loss=0.2057, simple_loss=0.2766, pruned_loss=0.06738, over 6791.00 frames.], tot_loss[loss=0.2335, simple_loss=0.3057, pruned_loss=0.08063, over 1399049.05 frames.], batch size: 15, lr: 1.65e-03 +2022-05-27 15:52:14,257 INFO [train.py:823] (2/4) Epoch 10, batch 0, loss[loss=0.2053, simple_loss=0.2826, pruned_loss=0.06402, over 7108.00 frames.], tot_loss[loss=0.2053, simple_loss=0.2826, pruned_loss=0.06402, over 7108.00 frames.], batch size: 20, lr: 1.59e-03 +2022-05-27 15:52:52,870 INFO [train.py:823] (2/4) Epoch 10, batch 50, loss[loss=0.1732, simple_loss=0.2562, pruned_loss=0.04512, over 7030.00 frames.], tot_loss[loss=0.2243, simple_loss=0.3007, pruned_loss=0.07392, over 318926.07 frames.], batch size: 17, lr: 1.58e-03 +2022-05-27 15:53:32,111 INFO [train.py:823] (2/4) Epoch 10, batch 100, loss[loss=0.213, simple_loss=0.2864, pruned_loss=0.06986, over 7391.00 frames.], tot_loss[loss=0.2252, simple_loss=0.2999, pruned_loss=0.07521, over 559343.44 frames.], batch size: 20, lr: 1.58e-03 +2022-05-27 15:54:10,835 INFO [train.py:823] (2/4) Epoch 10, batch 150, loss[loss=0.2518, simple_loss=0.3187, pruned_loss=0.09247, over 7288.00 frames.], tot_loss[loss=0.2262, simple_loss=0.3018, pruned_loss=0.0753, over 749687.64 frames.], batch size: 20, lr: 1.58e-03 +2022-05-27 15:54:50,491 INFO [train.py:823] (2/4) Epoch 10, batch 200, loss[loss=0.1894, simple_loss=0.28, pruned_loss=0.04938, over 7281.00 frames.], tot_loss[loss=0.2224, simple_loss=0.2984, pruned_loss=0.07324, over 901912.47 frames.], batch size: 21, lr: 1.57e-03 +2022-05-27 15:55:29,312 INFO [train.py:823] (2/4) Epoch 10, batch 250, loss[loss=0.2794, simple_loss=0.339, pruned_loss=0.1099, over 7371.00 frames.], tot_loss[loss=0.2227, simple_loss=0.2991, pruned_loss=0.07315, over 1017878.55 frames.], batch size: 21, lr: 1.57e-03 +2022-05-27 15:56:08,389 INFO [train.py:823] (2/4) Epoch 10, batch 300, loss[loss=0.2147, simple_loss=0.3024, pruned_loss=0.06348, over 7012.00 frames.], tot_loss[loss=0.223, simple_loss=0.2992, pruned_loss=0.07347, over 1108642.46 frames.], batch size: 26, lr: 1.57e-03 +2022-05-27 15:56:47,618 INFO [train.py:823] (2/4) Epoch 10, batch 350, loss[loss=0.226, simple_loss=0.2876, pruned_loss=0.08221, over 7211.00 frames.], tot_loss[loss=0.2241, simple_loss=0.3003, pruned_loss=0.07399, over 1175171.86 frames.], batch size: 16, lr: 1.56e-03 +2022-05-27 15:57:26,967 INFO [train.py:823] (2/4) Epoch 10, batch 400, loss[loss=0.2431, simple_loss=0.3101, pruned_loss=0.08801, over 7104.00 frames.], tot_loss[loss=0.2258, simple_loss=0.3017, pruned_loss=0.07497, over 1224898.10 frames.], batch size: 19, lr: 1.56e-03 +2022-05-27 15:58:06,040 INFO [train.py:823] (2/4) Epoch 10, batch 450, loss[loss=0.2341, simple_loss=0.3145, pruned_loss=0.07691, over 7280.00 frames.], tot_loss[loss=0.2259, simple_loss=0.3015, pruned_loss=0.07518, over 1265972.38 frames.], batch size: 20, lr: 1.56e-03 +2022-05-27 15:58:45,314 INFO [train.py:823] (2/4) Epoch 10, batch 500, loss[loss=0.2329, simple_loss=0.3061, pruned_loss=0.07992, over 7279.00 frames.], tot_loss[loss=0.2245, simple_loss=0.3003, pruned_loss=0.07434, over 1298733.51 frames.], batch size: 20, lr: 1.55e-03 +2022-05-27 15:59:23,993 INFO [train.py:823] (2/4) Epoch 10, batch 550, loss[loss=0.1851, simple_loss=0.2628, pruned_loss=0.0537, over 7092.00 frames.], tot_loss[loss=0.2245, simple_loss=0.2999, pruned_loss=0.07454, over 1328885.31 frames.], batch size: 18, lr: 1.55e-03 +2022-05-27 16:00:03,423 INFO 
[train.py:823] (2/4) Epoch 10, batch 600, loss[loss=0.1818, simple_loss=0.272, pruned_loss=0.04585, over 7306.00 frames.], tot_loss[loss=0.2258, simple_loss=0.3007, pruned_loss=0.07539, over 1353717.65 frames.], batch size: 19, lr: 1.55e-03 +2022-05-27 16:00:42,556 INFO [train.py:823] (2/4) Epoch 10, batch 650, loss[loss=0.2257, simple_loss=0.3211, pruned_loss=0.06517, over 7182.00 frames.], tot_loss[loss=0.2234, simple_loss=0.2986, pruned_loss=0.07412, over 1371380.27 frames.], batch size: 21, lr: 1.54e-03 +2022-05-27 16:01:22,318 INFO [train.py:823] (2/4) Epoch 10, batch 700, loss[loss=0.287, simple_loss=0.3258, pruned_loss=0.1241, over 7017.00 frames.], tot_loss[loss=0.2235, simple_loss=0.2989, pruned_loss=0.07409, over 1384932.27 frames.], batch size: 16, lr: 1.54e-03 +2022-05-27 16:02:01,152 INFO [train.py:823] (2/4) Epoch 10, batch 750, loss[loss=0.202, simple_loss=0.2798, pruned_loss=0.06208, over 7190.00 frames.], tot_loss[loss=0.2235, simple_loss=0.2993, pruned_loss=0.07387, over 1392979.94 frames.], batch size: 18, lr: 1.54e-03 +2022-05-27 16:02:40,247 INFO [train.py:823] (2/4) Epoch 10, batch 800, loss[loss=0.2664, simple_loss=0.3392, pruned_loss=0.09683, over 7205.00 frames.], tot_loss[loss=0.2239, simple_loss=0.3003, pruned_loss=0.0737, over 1398465.85 frames.], batch size: 25, lr: 1.53e-03 +2022-05-27 16:03:19,642 INFO [train.py:823] (2/4) Epoch 10, batch 850, loss[loss=0.2189, simple_loss=0.301, pruned_loss=0.06835, over 7182.00 frames.], tot_loss[loss=0.224, simple_loss=0.3005, pruned_loss=0.07375, over 1405256.04 frames.], batch size: 22, lr: 1.53e-03 +2022-05-27 16:03:59,183 INFO [train.py:823] (2/4) Epoch 10, batch 900, loss[loss=0.2369, simple_loss=0.2971, pruned_loss=0.08834, over 7252.00 frames.], tot_loss[loss=0.2246, simple_loss=0.3003, pruned_loss=0.07447, over 1405334.13 frames.], batch size: 16, lr: 1.53e-03 +2022-05-27 16:04:53,514 INFO [train.py:823] (2/4) Epoch 11, batch 0, loss[loss=0.2369, simple_loss=0.3334, pruned_loss=0.0702, over 7108.00 frames.], tot_loss[loss=0.2369, simple_loss=0.3334, pruned_loss=0.0702, over 7108.00 frames.], batch size: 19, lr: 1.47e-03 +2022-05-27 16:05:32,629 INFO [train.py:823] (2/4) Epoch 11, batch 50, loss[loss=0.1823, simple_loss=0.2707, pruned_loss=0.04694, over 6523.00 frames.], tot_loss[loss=0.2235, simple_loss=0.301, pruned_loss=0.07301, over 323773.50 frames.], batch size: 34, lr: 1.47e-03 +2022-05-27 16:06:11,615 INFO [train.py:823] (2/4) Epoch 11, batch 100, loss[loss=0.1958, simple_loss=0.27, pruned_loss=0.06083, over 7160.00 frames.], tot_loss[loss=0.2194, simple_loss=0.2959, pruned_loss=0.07147, over 569845.91 frames.], batch size: 17, lr: 1.46e-03 +2022-05-27 16:06:50,799 INFO [train.py:823] (2/4) Epoch 11, batch 150, loss[loss=0.2594, simple_loss=0.3249, pruned_loss=0.09695, over 7240.00 frames.], tot_loss[loss=0.22, simple_loss=0.2967, pruned_loss=0.07166, over 760079.39 frames.], batch size: 24, lr: 1.46e-03 +2022-05-27 16:07:29,216 INFO [train.py:823] (2/4) Epoch 11, batch 200, loss[loss=0.2338, simple_loss=0.3036, pruned_loss=0.08201, over 7100.00 frames.], tot_loss[loss=0.2203, simple_loss=0.2976, pruned_loss=0.07146, over 900402.06 frames.], batch size: 19, lr: 1.46e-03 +2022-05-27 16:08:08,873 INFO [train.py:823] (2/4) Epoch 11, batch 250, loss[loss=0.2415, simple_loss=0.313, pruned_loss=0.08503, over 7082.00 frames.], tot_loss[loss=0.2194, simple_loss=0.2966, pruned_loss=0.07105, over 1013642.05 frames.], batch size: 18, lr: 1.45e-03 +2022-05-27 16:08:50,580 INFO [train.py:823] (2/4) Epoch 11, batch 300, 
loss[loss=0.2037, simple_loss=0.291, pruned_loss=0.05822, over 7182.00 frames.], tot_loss[loss=0.2183, simple_loss=0.2956, pruned_loss=0.07048, over 1104703.88 frames.], batch size: 25, lr: 1.45e-03 +2022-05-27 16:09:29,766 INFO [train.py:823] (2/4) Epoch 11, batch 350, loss[loss=0.2017, simple_loss=0.2913, pruned_loss=0.05609, over 7194.00 frames.], tot_loss[loss=0.2181, simple_loss=0.2952, pruned_loss=0.0705, over 1176270.16 frames.], batch size: 25, lr: 1.45e-03 +2022-05-27 16:10:08,850 INFO [train.py:823] (2/4) Epoch 11, batch 400, loss[loss=0.2428, simple_loss=0.3301, pruned_loss=0.07775, over 7112.00 frames.], tot_loss[loss=0.2177, simple_loss=0.2952, pruned_loss=0.07014, over 1231092.30 frames.], batch size: 19, lr: 1.44e-03 +2022-05-27 16:10:48,026 INFO [train.py:823] (2/4) Epoch 11, batch 450, loss[loss=0.2193, simple_loss=0.3054, pruned_loss=0.0666, over 7303.00 frames.], tot_loss[loss=0.2176, simple_loss=0.2953, pruned_loss=0.06995, over 1269946.50 frames.], batch size: 22, lr: 1.44e-03 +2022-05-27 16:11:28,183 INFO [train.py:823] (2/4) Epoch 11, batch 500, loss[loss=0.2052, simple_loss=0.2897, pruned_loss=0.06034, over 6580.00 frames.], tot_loss[loss=0.2177, simple_loss=0.2957, pruned_loss=0.06985, over 1303685.48 frames.], batch size: 34, lr: 1.44e-03 +2022-05-27 16:12:07,642 INFO [train.py:823] (2/4) Epoch 11, batch 550, loss[loss=0.216, simple_loss=0.2959, pruned_loss=0.06803, over 7427.00 frames.], tot_loss[loss=0.2186, simple_loss=0.2969, pruned_loss=0.07011, over 1332329.18 frames.], batch size: 22, lr: 1.44e-03 +2022-05-27 16:12:46,696 INFO [train.py:823] (2/4) Epoch 11, batch 600, loss[loss=0.1855, simple_loss=0.2682, pruned_loss=0.05141, over 7382.00 frames.], tot_loss[loss=0.2188, simple_loss=0.2964, pruned_loss=0.07059, over 1350982.97 frames.], batch size: 19, lr: 1.43e-03 +2022-05-27 16:13:26,094 INFO [train.py:823] (2/4) Epoch 11, batch 650, loss[loss=0.2084, simple_loss=0.2797, pruned_loss=0.06854, over 7291.00 frames.], tot_loss[loss=0.2179, simple_loss=0.2957, pruned_loss=0.07001, over 1368264.86 frames.], batch size: 18, lr: 1.43e-03 +2022-05-27 16:14:04,487 INFO [train.py:823] (2/4) Epoch 11, batch 700, loss[loss=0.258, simple_loss=0.3118, pruned_loss=0.1021, over 7172.00 frames.], tot_loss[loss=0.2179, simple_loss=0.2959, pruned_loss=0.06998, over 1382501.83 frames.], batch size: 17, lr: 1.43e-03 +2022-05-27 16:14:45,013 INFO [train.py:823] (2/4) Epoch 11, batch 750, loss[loss=0.1547, simple_loss=0.2341, pruned_loss=0.03766, over 7298.00 frames.], tot_loss[loss=0.2162, simple_loss=0.2939, pruned_loss=0.06919, over 1392246.24 frames.], batch size: 17, lr: 1.42e-03 +2022-05-27 16:15:23,539 INFO [train.py:823] (2/4) Epoch 11, batch 800, loss[loss=0.2042, simple_loss=0.2811, pruned_loss=0.06367, over 7198.00 frames.], tot_loss[loss=0.217, simple_loss=0.2949, pruned_loss=0.06956, over 1397163.59 frames.], batch size: 19, lr: 1.42e-03 +2022-05-27 16:16:03,338 INFO [train.py:823] (2/4) Epoch 11, batch 850, loss[loss=0.2399, simple_loss=0.3227, pruned_loss=0.07855, over 7111.00 frames.], tot_loss[loss=0.2175, simple_loss=0.2953, pruned_loss=0.06983, over 1399342.56 frames.], batch size: 20, lr: 1.42e-03 +2022-05-27 16:16:42,318 INFO [train.py:823] (2/4) Epoch 11, batch 900, loss[loss=0.1927, simple_loss=0.2625, pruned_loss=0.06143, over 7224.00 frames.], tot_loss[loss=0.2192, simple_loss=0.2967, pruned_loss=0.07081, over 1399206.09 frames.], batch size: 16, lr: 1.42e-03 +2022-05-27 16:17:32,986 INFO [train.py:823] (2/4) Epoch 12, batch 0, loss[loss=0.207, 
simple_loss=0.2835, pruned_loss=0.06524, over 7281.00 frames.], tot_loss[loss=0.207, simple_loss=0.2835, pruned_loss=0.06524, over 7281.00 frames.], batch size: 17, lr: 1.36e-03 +2022-05-27 16:18:12,410 INFO [train.py:823] (2/4) Epoch 12, batch 50, loss[loss=0.2497, simple_loss=0.3214, pruned_loss=0.08905, over 7252.00 frames.], tot_loss[loss=0.2142, simple_loss=0.2922, pruned_loss=0.06816, over 318075.90 frames.], batch size: 24, lr: 1.36e-03 +2022-05-27 16:18:51,720 INFO [train.py:823] (2/4) Epoch 12, batch 100, loss[loss=0.208, simple_loss=0.2818, pruned_loss=0.06704, over 7164.00 frames.], tot_loss[loss=0.2128, simple_loss=0.2923, pruned_loss=0.06662, over 561951.77 frames.], batch size: 23, lr: 1.36e-03 +2022-05-27 16:19:30,774 INFO [train.py:823] (2/4) Epoch 12, batch 150, loss[loss=0.183, simple_loss=0.2714, pruned_loss=0.04728, over 7274.00 frames.], tot_loss[loss=0.2118, simple_loss=0.2918, pruned_loss=0.06589, over 753048.03 frames.], batch size: 20, lr: 1.36e-03 +2022-05-27 16:20:10,298 INFO [train.py:823] (2/4) Epoch 12, batch 200, loss[loss=0.1972, simple_loss=0.2719, pruned_loss=0.06121, over 6761.00 frames.], tot_loss[loss=0.2102, simple_loss=0.291, pruned_loss=0.06473, over 899374.89 frames.], batch size: 15, lr: 1.35e-03 +2022-05-27 16:20:49,261 INFO [train.py:823] (2/4) Epoch 12, batch 250, loss[loss=0.2412, simple_loss=0.3173, pruned_loss=0.08259, over 6973.00 frames.], tot_loss[loss=0.2098, simple_loss=0.2904, pruned_loss=0.06458, over 1016531.95 frames.], batch size: 26, lr: 1.35e-03 +2022-05-27 16:21:28,567 INFO [train.py:823] (2/4) Epoch 12, batch 300, loss[loss=0.2063, simple_loss=0.2772, pruned_loss=0.06767, over 7192.00 frames.], tot_loss[loss=0.2109, simple_loss=0.2915, pruned_loss=0.06519, over 1103425.29 frames.], batch size: 19, lr: 1.35e-03 +2022-05-27 16:22:07,689 INFO [train.py:823] (2/4) Epoch 12, batch 350, loss[loss=0.2202, simple_loss=0.2966, pruned_loss=0.07184, over 7341.00 frames.], tot_loss[loss=0.2112, simple_loss=0.2917, pruned_loss=0.06538, over 1176717.09 frames.], batch size: 23, lr: 1.35e-03 +2022-05-27 16:22:46,807 INFO [train.py:823] (2/4) Epoch 12, batch 400, loss[loss=0.2286, simple_loss=0.3069, pruned_loss=0.07517, over 6937.00 frames.], tot_loss[loss=0.2117, simple_loss=0.2918, pruned_loss=0.06576, over 1231014.68 frames.], batch size: 29, lr: 1.34e-03 +2022-05-27 16:23:26,179 INFO [train.py:823] (2/4) Epoch 12, batch 450, loss[loss=0.2324, simple_loss=0.3032, pruned_loss=0.08081, over 7383.00 frames.], tot_loss[loss=0.2118, simple_loss=0.2919, pruned_loss=0.0659, over 1273502.70 frames.], batch size: 20, lr: 1.34e-03 +2022-05-27 16:24:06,000 INFO [train.py:823] (2/4) Epoch 12, batch 500, loss[loss=0.1777, simple_loss=0.2648, pruned_loss=0.04535, over 7298.00 frames.], tot_loss[loss=0.2113, simple_loss=0.2913, pruned_loss=0.06559, over 1310997.93 frames.], batch size: 20, lr: 1.34e-03 +2022-05-27 16:24:44,937 INFO [train.py:823] (2/4) Epoch 12, batch 550, loss[loss=0.2164, simple_loss=0.2734, pruned_loss=0.07973, over 7032.00 frames.], tot_loss[loss=0.2116, simple_loss=0.2914, pruned_loss=0.06593, over 1338971.45 frames.], batch size: 17, lr: 1.34e-03 +2022-05-27 16:25:24,212 INFO [train.py:823] (2/4) Epoch 12, batch 600, loss[loss=0.1778, simple_loss=0.266, pruned_loss=0.04481, over 6827.00 frames.], tot_loss[loss=0.2107, simple_loss=0.2903, pruned_loss=0.06557, over 1359037.68 frames.], batch size: 15, lr: 1.33e-03 +2022-05-27 16:26:03,085 INFO [train.py:823] (2/4) Epoch 12, batch 650, loss[loss=0.2164, simple_loss=0.2937, 
pruned_loss=0.06958, over 7281.00 frames.], tot_loss[loss=0.2109, simple_loss=0.2906, pruned_loss=0.0656, over 1371423.39 frames.], batch size: 21, lr: 1.33e-03 +2022-05-27 16:26:41,910 INFO [train.py:823] (2/4) Epoch 12, batch 700, loss[loss=0.2358, simple_loss=0.3109, pruned_loss=0.08039, over 7273.00 frames.], tot_loss[loss=0.2106, simple_loss=0.2906, pruned_loss=0.06533, over 1383033.48 frames.], batch size: 20, lr: 1.33e-03 +2022-05-27 16:27:21,152 INFO [train.py:823] (2/4) Epoch 12, batch 750, loss[loss=0.2496, simple_loss=0.3348, pruned_loss=0.08222, over 7304.00 frames.], tot_loss[loss=0.212, simple_loss=0.2917, pruned_loss=0.06611, over 1389390.94 frames.], batch size: 22, lr: 1.33e-03 +2022-05-27 16:28:00,227 INFO [train.py:823] (2/4) Epoch 12, batch 800, loss[loss=0.2287, simple_loss=0.3055, pruned_loss=0.07594, over 7310.00 frames.], tot_loss[loss=0.2118, simple_loss=0.2915, pruned_loss=0.06603, over 1396278.02 frames.], batch size: 22, lr: 1.32e-03 +2022-05-27 16:28:38,846 INFO [train.py:823] (2/4) Epoch 12, batch 850, loss[loss=0.1995, simple_loss=0.2773, pruned_loss=0.06086, over 7199.00 frames.], tot_loss[loss=0.2116, simple_loss=0.2916, pruned_loss=0.06583, over 1401322.12 frames.], batch size: 18, lr: 1.32e-03 +2022-05-27 16:29:17,851 INFO [train.py:823] (2/4) Epoch 12, batch 900, loss[loss=0.2035, simple_loss=0.2898, pruned_loss=0.05856, over 7084.00 frames.], tot_loss[loss=0.211, simple_loss=0.2907, pruned_loss=0.0656, over 1397574.44 frames.], batch size: 19, lr: 1.32e-03 +2022-05-27 16:30:08,406 INFO [train.py:823] (2/4) Epoch 13, batch 0, loss[loss=0.2258, simple_loss=0.3124, pruned_loss=0.06961, over 7176.00 frames.], tot_loss[loss=0.2258, simple_loss=0.3124, pruned_loss=0.06961, over 7176.00 frames.], batch size: 22, lr: 1.27e-03 +2022-05-27 16:30:48,325 INFO [train.py:823] (2/4) Epoch 13, batch 50, loss[loss=0.1769, simple_loss=0.2606, pruned_loss=0.0466, over 7284.00 frames.], tot_loss[loss=0.2094, simple_loss=0.2891, pruned_loss=0.06482, over 317807.92 frames.], batch size: 19, lr: 1.27e-03 +2022-05-27 16:31:28,682 INFO [train.py:823] (2/4) Epoch 13, batch 100, loss[loss=0.1934, simple_loss=0.2776, pruned_loss=0.05453, over 7298.00 frames.], tot_loss[loss=0.2106, simple_loss=0.2907, pruned_loss=0.06529, over 561830.71 frames.], batch size: 18, lr: 1.27e-03 +2022-05-27 16:32:11,906 INFO [train.py:823] (2/4) Epoch 13, batch 150, loss[loss=0.196, simple_loss=0.2817, pruned_loss=0.05519, over 7391.00 frames.], tot_loss[loss=0.2082, simple_loss=0.2887, pruned_loss=0.06383, over 751829.12 frames.], batch size: 19, lr: 1.26e-03 +2022-05-27 16:32:57,608 INFO [train.py:823] (2/4) Epoch 13, batch 200, loss[loss=0.1994, simple_loss=0.2748, pruned_loss=0.06204, over 7025.00 frames.], tot_loss[loss=0.2079, simple_loss=0.2888, pruned_loss=0.06352, over 903218.45 frames.], batch size: 17, lr: 1.26e-03 +2022-05-27 16:33:37,074 INFO [train.py:823] (2/4) Epoch 13, batch 250, loss[loss=0.2306, simple_loss=0.3088, pruned_loss=0.0762, over 7165.00 frames.], tot_loss[loss=0.2081, simple_loss=0.2887, pruned_loss=0.06377, over 1016486.04 frames.], batch size: 22, lr: 1.26e-03 +2022-05-27 16:34:18,182 INFO [train.py:823] (2/4) Epoch 13, batch 300, loss[loss=0.1554, simple_loss=0.2311, pruned_loss=0.03987, over 7301.00 frames.], tot_loss[loss=0.2074, simple_loss=0.2882, pruned_loss=0.06325, over 1109945.28 frames.], batch size: 17, lr: 1.26e-03 +2022-05-27 16:34:57,107 INFO [train.py:823] (2/4) Epoch 13, batch 350, loss[loss=0.2163, simple_loss=0.2927, pruned_loss=0.06994, over 6348.00 
frames.], tot_loss[loss=0.2079, simple_loss=0.2887, pruned_loss=0.06352, over 1175724.70 frames.], batch size: 34, lr: 1.26e-03 +2022-05-27 16:35:36,396 INFO [train.py:823] (2/4) Epoch 13, batch 400, loss[loss=0.2149, simple_loss=0.2966, pruned_loss=0.06663, over 6981.00 frames.], tot_loss[loss=0.2093, simple_loss=0.2898, pruned_loss=0.06443, over 1229121.95 frames.], batch size: 26, lr: 1.25e-03 +2022-05-27 16:36:15,560 INFO [train.py:823] (2/4) Epoch 13, batch 450, loss[loss=0.2023, simple_loss=0.2819, pruned_loss=0.06135, over 7037.00 frames.], tot_loss[loss=0.2092, simple_loss=0.2895, pruned_loss=0.06449, over 1267551.15 frames.], batch size: 29, lr: 1.25e-03 +2022-05-27 16:36:55,093 INFO [train.py:823] (2/4) Epoch 13, batch 500, loss[loss=0.2151, simple_loss=0.3027, pruned_loss=0.06377, over 6822.00 frames.], tot_loss[loss=0.2073, simple_loss=0.2879, pruned_loss=0.06334, over 1300796.12 frames.], batch size: 29, lr: 1.25e-03 +2022-05-27 16:37:34,272 INFO [train.py:823] (2/4) Epoch 13, batch 550, loss[loss=0.1878, simple_loss=0.2738, pruned_loss=0.05091, over 7299.00 frames.], tot_loss[loss=0.2082, simple_loss=0.2884, pruned_loss=0.06401, over 1321980.80 frames.], batch size: 19, lr: 1.25e-03 +2022-05-27 16:38:13,505 INFO [train.py:823] (2/4) Epoch 13, batch 600, loss[loss=0.1943, simple_loss=0.2872, pruned_loss=0.05072, over 7282.00 frames.], tot_loss[loss=0.2084, simple_loss=0.2891, pruned_loss=0.06388, over 1344454.51 frames.], batch size: 20, lr: 1.24e-03 +2022-05-27 16:38:53,837 INFO [train.py:823] (2/4) Epoch 13, batch 650, loss[loss=0.2087, simple_loss=0.2863, pruned_loss=0.06558, over 7200.00 frames.], tot_loss[loss=0.2072, simple_loss=0.2882, pruned_loss=0.06316, over 1360614.48 frames.], batch size: 19, lr: 1.24e-03 +2022-05-27 16:39:33,221 INFO [train.py:823] (2/4) Epoch 13, batch 700, loss[loss=0.1827, simple_loss=0.252, pruned_loss=0.05673, over 7013.00 frames.], tot_loss[loss=0.2069, simple_loss=0.2881, pruned_loss=0.06285, over 1371623.23 frames.], batch size: 17, lr: 1.24e-03 +2022-05-27 16:40:12,614 INFO [train.py:823] (2/4) Epoch 13, batch 750, loss[loss=0.1872, simple_loss=0.2775, pruned_loss=0.04847, over 6952.00 frames.], tot_loss[loss=0.2053, simple_loss=0.2875, pruned_loss=0.06151, over 1379224.31 frames.], batch size: 29, lr: 1.24e-03 +2022-05-27 16:40:51,215 INFO [train.py:823] (2/4) Epoch 13, batch 800, loss[loss=0.2418, simple_loss=0.3258, pruned_loss=0.07893, over 7151.00 frames.], tot_loss[loss=0.2055, simple_loss=0.2876, pruned_loss=0.06171, over 1386650.88 frames.], batch size: 23, lr: 1.24e-03 +2022-05-27 16:41:30,442 INFO [train.py:823] (2/4) Epoch 13, batch 850, loss[loss=0.225, simple_loss=0.3029, pruned_loss=0.07349, over 7275.00 frames.], tot_loss[loss=0.2052, simple_loss=0.2871, pruned_loss=0.06168, over 1396365.82 frames.], batch size: 20, lr: 1.23e-03 +2022-05-27 16:42:09,512 INFO [train.py:823] (2/4) Epoch 13, batch 900, loss[loss=0.1726, simple_loss=0.2556, pruned_loss=0.04481, over 7304.00 frames.], tot_loss[loss=0.2049, simple_loss=0.2874, pruned_loss=0.06119, over 1396532.58 frames.], batch size: 19, lr: 1.23e-03 +2022-05-27 16:42:48,663 INFO [train.py:823] (2/4) Epoch 13, batch 950, loss[loss=0.1772, simple_loss=0.2436, pruned_loss=0.05536, over 7002.00 frames.], tot_loss[loss=0.2042, simple_loss=0.2868, pruned_loss=0.06083, over 1395705.23 frames.], batch size: 16, lr: 1.23e-03 +2022-05-27 16:43:01,789 INFO [train.py:823] (2/4) Epoch 14, batch 0, loss[loss=0.183, simple_loss=0.2633, pruned_loss=0.05134, over 7316.00 frames.], 
tot_loss[loss=0.183, simple_loss=0.2633, pruned_loss=0.05134, over 7316.00 frames.], batch size: 22, lr: 1.19e-03 +2022-05-27 16:43:41,484 INFO [train.py:823] (2/4) Epoch 14, batch 50, loss[loss=0.2101, simple_loss=0.3021, pruned_loss=0.05905, over 7213.00 frames.], tot_loss[loss=0.1992, simple_loss=0.2836, pruned_loss=0.0574, over 325071.65 frames.], batch size: 25, lr: 1.19e-03 +2022-05-27 16:44:20,924 INFO [train.py:823] (2/4) Epoch 14, batch 100, loss[loss=0.1867, simple_loss=0.2809, pruned_loss=0.04623, over 7220.00 frames.], tot_loss[loss=0.2021, simple_loss=0.2855, pruned_loss=0.05935, over 570966.24 frames.], batch size: 24, lr: 1.19e-03 +2022-05-27 16:45:00,005 INFO [train.py:823] (2/4) Epoch 14, batch 150, loss[loss=0.1881, simple_loss=0.2853, pruned_loss=0.0455, over 7283.00 frames.], tot_loss[loss=0.2024, simple_loss=0.286, pruned_loss=0.05941, over 755132.50 frames.], batch size: 21, lr: 1.18e-03 +2022-05-27 16:45:39,643 INFO [train.py:823] (2/4) Epoch 14, batch 200, loss[loss=0.2083, simple_loss=0.2916, pruned_loss=0.06253, over 7376.00 frames.], tot_loss[loss=0.2027, simple_loss=0.2863, pruned_loss=0.05957, over 900443.55 frames.], batch size: 21, lr: 1.18e-03 +2022-05-27 16:46:18,503 INFO [train.py:823] (2/4) Epoch 14, batch 250, loss[loss=0.2284, simple_loss=0.2963, pruned_loss=0.0803, over 7295.00 frames.], tot_loss[loss=0.2, simple_loss=0.2835, pruned_loss=0.05824, over 1018626.96 frames.], batch size: 19, lr: 1.18e-03 +2022-05-27 16:46:57,746 INFO [train.py:823] (2/4) Epoch 14, batch 300, loss[loss=0.2334, simple_loss=0.3046, pruned_loss=0.08107, over 6374.00 frames.], tot_loss[loss=0.2, simple_loss=0.2831, pruned_loss=0.05847, over 1096978.46 frames.], batch size: 34, lr: 1.18e-03 +2022-05-27 16:47:36,972 INFO [train.py:823] (2/4) Epoch 14, batch 350, loss[loss=0.1722, simple_loss=0.2555, pruned_loss=0.04441, over 7303.00 frames.], tot_loss[loss=0.2012, simple_loss=0.2843, pruned_loss=0.05908, over 1174022.43 frames.], batch size: 19, lr: 1.18e-03 +2022-05-27 16:48:15,996 INFO [train.py:823] (2/4) Epoch 14, batch 400, loss[loss=0.1938, simple_loss=0.2815, pruned_loss=0.05304, over 7303.00 frames.], tot_loss[loss=0.2013, simple_loss=0.2842, pruned_loss=0.05921, over 1228702.29 frames.], batch size: 19, lr: 1.17e-03 +2022-05-27 16:48:54,621 INFO [train.py:823] (2/4) Epoch 14, batch 450, loss[loss=0.2192, simple_loss=0.2891, pruned_loss=0.07465, over 7097.00 frames.], tot_loss[loss=0.2028, simple_loss=0.2856, pruned_loss=0.06, over 1267520.13 frames.], batch size: 18, lr: 1.17e-03 +2022-05-27 16:49:33,671 INFO [train.py:823] (2/4) Epoch 14, batch 500, loss[loss=0.2112, simple_loss=0.301, pruned_loss=0.06073, over 7181.00 frames.], tot_loss[loss=0.2017, simple_loss=0.2846, pruned_loss=0.0594, over 1302716.11 frames.], batch size: 21, lr: 1.17e-03 +2022-05-27 16:50:12,872 INFO [train.py:823] (2/4) Epoch 14, batch 550, loss[loss=0.1909, simple_loss=0.2807, pruned_loss=0.05057, over 7244.00 frames.], tot_loss[loss=0.2014, simple_loss=0.2841, pruned_loss=0.05931, over 1333362.82 frames.], batch size: 25, lr: 1.17e-03 +2022-05-27 16:50:52,405 INFO [train.py:823] (2/4) Epoch 14, batch 600, loss[loss=0.1904, simple_loss=0.2687, pruned_loss=0.05605, over 7389.00 frames.], tot_loss[loss=0.2003, simple_loss=0.2824, pruned_loss=0.05913, over 1354459.78 frames.], batch size: 19, lr: 1.17e-03 +2022-05-27 16:51:31,502 INFO [train.py:823] (2/4) Epoch 14, batch 650, loss[loss=0.1912, simple_loss=0.2617, pruned_loss=0.06032, over 7289.00 frames.], tot_loss[loss=0.2005, simple_loss=0.283, 
pruned_loss=0.05904, over 1367613.08 frames.], batch size: 17, lr: 1.16e-03 +2022-05-27 16:52:10,528 INFO [train.py:823] (2/4) Epoch 14, batch 700, loss[loss=0.1989, simple_loss=0.2828, pruned_loss=0.0575, over 7292.00 frames.], tot_loss[loss=0.2012, simple_loss=0.2836, pruned_loss=0.05938, over 1376803.55 frames.], batch size: 21, lr: 1.16e-03 +2022-05-27 16:52:49,143 INFO [train.py:823] (2/4) Epoch 14, batch 750, loss[loss=0.1904, simple_loss=0.2798, pruned_loss=0.05052, over 7112.00 frames.], tot_loss[loss=0.2, simple_loss=0.2829, pruned_loss=0.05859, over 1387204.33 frames.], batch size: 20, lr: 1.16e-03 +2022-05-27 16:53:28,675 INFO [train.py:823] (2/4) Epoch 14, batch 800, loss[loss=0.1887, simple_loss=0.2741, pruned_loss=0.05163, over 7196.00 frames.], tot_loss[loss=0.2001, simple_loss=0.2828, pruned_loss=0.0587, over 1393288.40 frames.], batch size: 19, lr: 1.16e-03 +2022-05-27 16:54:09,174 INFO [train.py:823] (2/4) Epoch 14, batch 850, loss[loss=0.1745, simple_loss=0.2698, pruned_loss=0.03961, over 7275.00 frames.], tot_loss[loss=0.2004, simple_loss=0.2833, pruned_loss=0.05878, over 1396320.35 frames.], batch size: 20, lr: 1.16e-03 +2022-05-27 16:54:48,413 INFO [train.py:823] (2/4) Epoch 14, batch 900, loss[loss=0.1727, simple_loss=0.2462, pruned_loss=0.04964, over 7415.00 frames.], tot_loss[loss=0.2, simple_loss=0.2826, pruned_loss=0.05872, over 1401445.65 frames.], batch size: 18, lr: 1.15e-03 +2022-05-27 16:55:39,870 INFO [train.py:823] (2/4) Epoch 15, batch 0, loss[loss=0.1844, simple_loss=0.2691, pruned_loss=0.04988, over 7203.00 frames.], tot_loss[loss=0.1844, simple_loss=0.2691, pruned_loss=0.04988, over 7203.00 frames.], batch size: 19, lr: 1.12e-03 +2022-05-27 16:56:18,971 INFO [train.py:823] (2/4) Epoch 15, batch 50, loss[loss=0.2217, simple_loss=0.2999, pruned_loss=0.07168, over 7190.00 frames.], tot_loss[loss=0.2044, simple_loss=0.2874, pruned_loss=0.06074, over 319879.13 frames.], batch size: 18, lr: 1.12e-03 +2022-05-27 16:56:57,798 INFO [train.py:823] (2/4) Epoch 15, batch 100, loss[loss=0.1937, simple_loss=0.2809, pruned_loss=0.05319, over 7428.00 frames.], tot_loss[loss=0.1991, simple_loss=0.2828, pruned_loss=0.05775, over 560521.36 frames.], batch size: 22, lr: 1.11e-03 +2022-05-27 16:57:38,759 INFO [train.py:823] (2/4) Epoch 15, batch 150, loss[loss=0.1733, simple_loss=0.2463, pruned_loss=0.05018, over 7284.00 frames.], tot_loss[loss=0.1987, simple_loss=0.2816, pruned_loss=0.05788, over 752302.27 frames.], batch size: 17, lr: 1.11e-03 +2022-05-27 16:58:17,788 INFO [train.py:823] (2/4) Epoch 15, batch 200, loss[loss=0.2083, simple_loss=0.2913, pruned_loss=0.06269, over 7152.00 frames.], tot_loss[loss=0.1996, simple_loss=0.2825, pruned_loss=0.05833, over 899224.04 frames.], batch size: 23, lr: 1.11e-03 +2022-05-27 16:58:57,122 INFO [train.py:823] (2/4) Epoch 15, batch 250, loss[loss=0.2088, simple_loss=0.2968, pruned_loss=0.06035, over 6589.00 frames.], tot_loss[loss=0.198, simple_loss=0.2813, pruned_loss=0.05737, over 1015840.81 frames.], batch size: 34, lr: 1.11e-03 +2022-05-27 16:59:36,318 INFO [train.py:823] (2/4) Epoch 15, batch 300, loss[loss=0.2313, simple_loss=0.2978, pruned_loss=0.08243, over 7189.00 frames.], tot_loss[loss=0.198, simple_loss=0.2808, pruned_loss=0.05756, over 1105518.82 frames.], batch size: 18, lr: 1.11e-03 +2022-05-27 17:00:15,500 INFO [train.py:823] (2/4) Epoch 15, batch 350, loss[loss=0.1805, simple_loss=0.2727, pruned_loss=0.04416, over 7381.00 frames.], tot_loss[loss=0.1982, simple_loss=0.2818, pruned_loss=0.05733, over 1177676.67 
frames.], batch size: 20, lr: 1.10e-03 +2022-05-27 17:00:54,549 INFO [train.py:823] (2/4) Epoch 15, batch 400, loss[loss=0.1789, simple_loss=0.2686, pruned_loss=0.04455, over 7104.00 frames.], tot_loss[loss=0.1975, simple_loss=0.2811, pruned_loss=0.05694, over 1229275.39 frames.], batch size: 19, lr: 1.10e-03 +2022-05-27 17:01:34,115 INFO [train.py:823] (2/4) Epoch 15, batch 450, loss[loss=0.2216, simple_loss=0.3065, pruned_loss=0.06831, over 7227.00 frames.], tot_loss[loss=0.197, simple_loss=0.281, pruned_loss=0.05652, over 1276664.89 frames.], batch size: 24, lr: 1.10e-03 +2022-05-27 17:02:13,065 INFO [train.py:823] (2/4) Epoch 15, batch 500, loss[loss=0.209, simple_loss=0.292, pruned_loss=0.06302, over 7107.00 frames.], tot_loss[loss=0.1978, simple_loss=0.2816, pruned_loss=0.05697, over 1312534.49 frames.], batch size: 20, lr: 1.10e-03 +2022-05-27 17:02:53,417 INFO [train.py:823] (2/4) Epoch 15, batch 550, loss[loss=0.2036, simple_loss=0.2713, pruned_loss=0.06798, over 7039.00 frames.], tot_loss[loss=0.197, simple_loss=0.2809, pruned_loss=0.05661, over 1332999.89 frames.], batch size: 17, lr: 1.10e-03 +2022-05-27 17:03:32,334 INFO [train.py:823] (2/4) Epoch 15, batch 600, loss[loss=0.2097, simple_loss=0.2909, pruned_loss=0.06428, over 7301.00 frames.], tot_loss[loss=0.1973, simple_loss=0.2811, pruned_loss=0.05676, over 1356027.30 frames.], batch size: 19, lr: 1.10e-03 +2022-05-27 17:04:11,957 INFO [train.py:823] (2/4) Epoch 15, batch 650, loss[loss=0.2004, simple_loss=0.2858, pruned_loss=0.05751, over 7175.00 frames.], tot_loss[loss=0.1963, simple_loss=0.2809, pruned_loss=0.05588, over 1367606.64 frames.], batch size: 22, lr: 1.09e-03 +2022-05-27 17:04:51,176 INFO [train.py:823] (2/4) Epoch 15, batch 700, loss[loss=0.1922, simple_loss=0.2798, pruned_loss=0.05226, over 6938.00 frames.], tot_loss[loss=0.1952, simple_loss=0.2798, pruned_loss=0.05535, over 1382114.20 frames.], batch size: 29, lr: 1.09e-03 +2022-05-27 17:05:30,536 INFO [train.py:823] (2/4) Epoch 15, batch 750, loss[loss=0.2071, simple_loss=0.2865, pruned_loss=0.06383, over 5056.00 frames.], tot_loss[loss=0.1959, simple_loss=0.2802, pruned_loss=0.05579, over 1384690.17 frames.], batch size: 47, lr: 1.09e-03 +2022-05-27 17:06:09,225 INFO [train.py:823] (2/4) Epoch 15, batch 800, loss[loss=0.1727, simple_loss=0.2588, pruned_loss=0.04334, over 7195.00 frames.], tot_loss[loss=0.1955, simple_loss=0.2802, pruned_loss=0.05539, over 1389388.42 frames.], batch size: 19, lr: 1.09e-03 +2022-05-27 17:06:48,519 INFO [train.py:823] (2/4) Epoch 15, batch 850, loss[loss=0.2081, simple_loss=0.3007, pruned_loss=0.05772, over 7213.00 frames.], tot_loss[loss=0.1959, simple_loss=0.2804, pruned_loss=0.05566, over 1394494.96 frames.], batch size: 25, lr: 1.09e-03 +2022-05-27 17:07:27,495 INFO [train.py:823] (2/4) Epoch 15, batch 900, loss[loss=0.2073, simple_loss=0.2768, pruned_loss=0.06886, over 7097.00 frames.], tot_loss[loss=0.1969, simple_loss=0.2813, pruned_loss=0.05623, over 1399247.45 frames.], batch size: 19, lr: 1.09e-03 +2022-05-27 17:08:06,684 INFO [train.py:823] (2/4) Epoch 15, batch 950, loss[loss=0.2149, simple_loss=0.2922, pruned_loss=0.06885, over 5109.00 frames.], tot_loss[loss=0.1969, simple_loss=0.2809, pruned_loss=0.05639, over 1380211.53 frames.], batch size: 46, lr: 1.08e-03 +2022-05-27 17:08:19,735 INFO [train.py:823] (2/4) Epoch 16, batch 0, loss[loss=0.1897, simple_loss=0.2695, pruned_loss=0.05491, over 5204.00 frames.], tot_loss[loss=0.1897, simple_loss=0.2695, pruned_loss=0.05491, over 5204.00 frames.], batch size: 46, 
lr: 1.05e-03 +2022-05-27 17:08:58,780 INFO [train.py:823] (2/4) Epoch 16, batch 50, loss[loss=0.1512, simple_loss=0.2331, pruned_loss=0.03463, over 7018.00 frames.], tot_loss[loss=0.1958, simple_loss=0.2796, pruned_loss=0.05599, over 318030.81 frames.], batch size: 16, lr: 1.05e-03 +2022-05-27 17:09:38,443 INFO [train.py:823] (2/4) Epoch 16, batch 100, loss[loss=0.1915, simple_loss=0.2913, pruned_loss=0.04585, over 7204.00 frames.], tot_loss[loss=0.1936, simple_loss=0.2781, pruned_loss=0.0546, over 560527.34 frames.], batch size: 19, lr: 1.05e-03 +2022-05-27 17:10:17,980 INFO [train.py:823] (2/4) Epoch 16, batch 150, loss[loss=0.2163, simple_loss=0.2898, pruned_loss=0.07142, over 7384.00 frames.], tot_loss[loss=0.1918, simple_loss=0.2762, pruned_loss=0.05369, over 756478.61 frames.], batch size: 19, lr: 1.05e-03 +2022-05-27 17:10:57,613 INFO [train.py:823] (2/4) Epoch 16, batch 200, loss[loss=0.2209, simple_loss=0.2973, pruned_loss=0.07221, over 7179.00 frames.], tot_loss[loss=0.1932, simple_loss=0.2773, pruned_loss=0.05454, over 903993.07 frames.], batch size: 23, lr: 1.05e-03 +2022-05-27 17:11:36,385 INFO [train.py:823] (2/4) Epoch 16, batch 250, loss[loss=0.2168, simple_loss=0.2923, pruned_loss=0.07072, over 7181.00 frames.], tot_loss[loss=0.1939, simple_loss=0.2781, pruned_loss=0.05484, over 1012725.22 frames.], batch size: 25, lr: 1.04e-03 +2022-05-27 17:12:16,037 INFO [train.py:823] (2/4) Epoch 16, batch 300, loss[loss=0.2032, simple_loss=0.2916, pruned_loss=0.05735, over 7222.00 frames.], tot_loss[loss=0.193, simple_loss=0.2767, pruned_loss=0.0546, over 1105772.08 frames.], batch size: 24, lr: 1.04e-03 +2022-05-27 17:12:55,377 INFO [train.py:823] (2/4) Epoch 16, batch 350, loss[loss=0.2013, simple_loss=0.2859, pruned_loss=0.05836, over 7342.00 frames.], tot_loss[loss=0.1945, simple_loss=0.2782, pruned_loss=0.05543, over 1173220.43 frames.], batch size: 23, lr: 1.04e-03 +2022-05-27 17:13:34,617 INFO [train.py:823] (2/4) Epoch 16, batch 400, loss[loss=0.1874, simple_loss=0.2798, pruned_loss=0.04753, over 7284.00 frames.], tot_loss[loss=0.1943, simple_loss=0.2785, pruned_loss=0.05504, over 1228584.76 frames.], batch size: 19, lr: 1.04e-03 +2022-05-27 17:14:13,510 INFO [train.py:823] (2/4) Epoch 16, batch 450, loss[loss=0.1889, simple_loss=0.281, pruned_loss=0.04837, over 7408.00 frames.], tot_loss[loss=0.1941, simple_loss=0.2786, pruned_loss=0.05482, over 1275413.58 frames.], batch size: 22, lr: 1.04e-03 +2022-05-27 17:14:53,422 INFO [train.py:823] (2/4) Epoch 16, batch 500, loss[loss=0.2115, simple_loss=0.306, pruned_loss=0.05852, over 6985.00 frames.], tot_loss[loss=0.1936, simple_loss=0.2779, pruned_loss=0.05464, over 1311464.19 frames.], batch size: 29, lr: 1.04e-03 +2022-05-27 17:15:32,531 INFO [train.py:823] (2/4) Epoch 16, batch 550, loss[loss=0.2198, simple_loss=0.3144, pruned_loss=0.06264, over 7369.00 frames.], tot_loss[loss=0.1946, simple_loss=0.2792, pruned_loss=0.05498, over 1329160.84 frames.], batch size: 21, lr: 1.03e-03 +2022-05-27 17:16:11,613 INFO [train.py:823] (2/4) Epoch 16, batch 600, loss[loss=0.1927, simple_loss=0.2727, pruned_loss=0.05637, over 7099.00 frames.], tot_loss[loss=0.1941, simple_loss=0.279, pruned_loss=0.05462, over 1343973.78 frames.], batch size: 19, lr: 1.03e-03 +2022-05-27 17:16:52,337 INFO [train.py:823] (2/4) Epoch 16, batch 650, loss[loss=0.2011, simple_loss=0.2801, pruned_loss=0.0611, over 7191.00 frames.], tot_loss[loss=0.1954, simple_loss=0.2806, pruned_loss=0.05513, over 1360172.87 frames.], batch size: 16, lr: 1.03e-03 +2022-05-27 
17:17:31,509 INFO [train.py:823] (2/4) Epoch 16, batch 700, loss[loss=0.1654, simple_loss=0.26, pruned_loss=0.03541, over 7294.00 frames.], tot_loss[loss=0.1949, simple_loss=0.2797, pruned_loss=0.05507, over 1370662.70 frames.], batch size: 19, lr: 1.03e-03 +2022-05-27 17:18:11,875 INFO [train.py:823] (2/4) Epoch 16, batch 750, loss[loss=0.1779, simple_loss=0.2596, pruned_loss=0.04805, over 7181.00 frames.], tot_loss[loss=0.1958, simple_loss=0.2805, pruned_loss=0.05557, over 1382673.20 frames.], batch size: 18, lr: 1.03e-03 +2022-05-27 17:18:51,409 INFO [train.py:823] (2/4) Epoch 16, batch 800, loss[loss=0.1659, simple_loss=0.2594, pruned_loss=0.03626, over 7378.00 frames.], tot_loss[loss=0.1944, simple_loss=0.2793, pruned_loss=0.0548, over 1393118.91 frames.], batch size: 20, lr: 1.03e-03 +2022-05-27 17:19:30,600 INFO [train.py:823] (2/4) Epoch 16, batch 850, loss[loss=0.185, simple_loss=0.281, pruned_loss=0.04445, over 7191.00 frames.], tot_loss[loss=0.1942, simple_loss=0.2792, pruned_loss=0.05457, over 1400205.50 frames.], batch size: 21, lr: 1.03e-03 +2022-05-27 17:20:11,152 INFO [train.py:823] (2/4) Epoch 16, batch 900, loss[loss=0.1781, simple_loss=0.258, pruned_loss=0.04911, over 7048.00 frames.], tot_loss[loss=0.1933, simple_loss=0.2783, pruned_loss=0.05414, over 1401352.48 frames.], batch size: 17, lr: 1.02e-03 +2022-05-27 17:21:02,489 INFO [train.py:823] (2/4) Epoch 17, batch 0, loss[loss=0.1984, simple_loss=0.2905, pruned_loss=0.05317, over 7194.00 frames.], tot_loss[loss=0.1984, simple_loss=0.2905, pruned_loss=0.05317, over 7194.00 frames.], batch size: 21, lr: 9.94e-04 +2022-05-27 17:21:41,995 INFO [train.py:823] (2/4) Epoch 17, batch 50, loss[loss=0.1823, simple_loss=0.2737, pruned_loss=0.04545, over 6961.00 frames.], tot_loss[loss=0.1988, simple_loss=0.2818, pruned_loss=0.05793, over 315280.01 frames.], batch size: 26, lr: 9.92e-04 +2022-05-27 17:22:21,126 INFO [train.py:823] (2/4) Epoch 17, batch 100, loss[loss=0.232, simple_loss=0.3143, pruned_loss=0.0748, over 6981.00 frames.], tot_loss[loss=0.1953, simple_loss=0.2795, pruned_loss=0.05552, over 561397.29 frames.], batch size: 26, lr: 9.91e-04 +2022-05-27 17:23:00,056 INFO [train.py:823] (2/4) Epoch 17, batch 150, loss[loss=0.1583, simple_loss=0.2451, pruned_loss=0.03571, over 7189.00 frames.], tot_loss[loss=0.1928, simple_loss=0.2779, pruned_loss=0.05386, over 748632.73 frames.], batch size: 18, lr: 9.89e-04 +2022-05-27 17:23:38,685 INFO [train.py:823] (2/4) Epoch 17, batch 200, loss[loss=0.2246, simple_loss=0.3208, pruned_loss=0.06419, over 6908.00 frames.], tot_loss[loss=0.1921, simple_loss=0.2781, pruned_loss=0.05306, over 896806.88 frames.], batch size: 29, lr: 9.88e-04 +2022-05-27 17:24:18,085 INFO [train.py:823] (2/4) Epoch 17, batch 250, loss[loss=0.1863, simple_loss=0.2757, pruned_loss=0.04844, over 7352.00 frames.], tot_loss[loss=0.1912, simple_loss=0.2779, pruned_loss=0.05223, over 1017719.87 frames.], batch size: 23, lr: 9.86e-04 +2022-05-27 17:24:57,507 INFO [train.py:823] (2/4) Epoch 17, batch 300, loss[loss=0.2017, simple_loss=0.2747, pruned_loss=0.06436, over 7307.00 frames.], tot_loss[loss=0.1909, simple_loss=0.2777, pruned_loss=0.05209, over 1104229.44 frames.], batch size: 18, lr: 9.85e-04 +2022-05-27 17:25:36,800 INFO [train.py:823] (2/4) Epoch 17, batch 350, loss[loss=0.196, simple_loss=0.2734, pruned_loss=0.05932, over 7396.00 frames.], tot_loss[loss=0.1921, simple_loss=0.2781, pruned_loss=0.0531, over 1170506.60 frames.], batch size: 19, lr: 9.84e-04 +2022-05-27 17:26:17,379 INFO [train.py:823] (2/4) 
Epoch 17, batch 400, loss[loss=0.1767, simple_loss=0.2677, pruned_loss=0.04288, over 7102.00 frames.], tot_loss[loss=0.1908, simple_loss=0.2758, pruned_loss=0.05288, over 1225896.03 frames.], batch size: 19, lr: 9.82e-04 +2022-05-27 17:26:56,042 INFO [train.py:823] (2/4) Epoch 17, batch 450, loss[loss=0.2093, simple_loss=0.2911, pruned_loss=0.06377, over 5140.00 frames.], tot_loss[loss=0.1914, simple_loss=0.2763, pruned_loss=0.05326, over 1261914.67 frames.], batch size: 47, lr: 9.81e-04 +2022-05-27 17:27:35,213 INFO [train.py:823] (2/4) Epoch 17, batch 500, loss[loss=0.1932, simple_loss=0.2666, pruned_loss=0.05986, over 7002.00 frames.], tot_loss[loss=0.1911, simple_loss=0.2763, pruned_loss=0.05295, over 1296998.57 frames.], batch size: 16, lr: 9.79e-04 +2022-05-27 17:28:14,714 INFO [train.py:823] (2/4) Epoch 17, batch 550, loss[loss=0.215, simple_loss=0.3007, pruned_loss=0.06467, over 7112.00 frames.], tot_loss[loss=0.1929, simple_loss=0.2775, pruned_loss=0.05421, over 1326845.70 frames.], batch size: 20, lr: 9.78e-04 +2022-05-27 17:28:53,877 INFO [train.py:823] (2/4) Epoch 17, batch 600, loss[loss=0.2015, simple_loss=0.285, pruned_loss=0.059, over 7311.00 frames.], tot_loss[loss=0.1926, simple_loss=0.2774, pruned_loss=0.05386, over 1349321.06 frames.], batch size: 22, lr: 9.76e-04 +2022-05-27 17:29:33,209 INFO [train.py:823] (2/4) Epoch 17, batch 650, loss[loss=0.1891, simple_loss=0.264, pruned_loss=0.05709, over 7000.00 frames.], tot_loss[loss=0.1922, simple_loss=0.2765, pruned_loss=0.05395, over 1362110.23 frames.], batch size: 16, lr: 9.75e-04 +2022-05-27 17:30:12,324 INFO [train.py:823] (2/4) Epoch 17, batch 700, loss[loss=0.1687, simple_loss=0.2438, pruned_loss=0.04677, over 6803.00 frames.], tot_loss[loss=0.1923, simple_loss=0.2766, pruned_loss=0.05399, over 1373084.45 frames.], batch size: 15, lr: 9.74e-04 +2022-05-27 17:30:51,266 INFO [train.py:823] (2/4) Epoch 17, batch 750, loss[loss=0.1963, simple_loss=0.2756, pruned_loss=0.05848, over 7150.00 frames.], tot_loss[loss=0.1912, simple_loss=0.2753, pruned_loss=0.05356, over 1385801.54 frames.], batch size: 17, lr: 9.72e-04 +2022-05-27 17:31:30,699 INFO [train.py:823] (2/4) Epoch 17, batch 800, loss[loss=0.1754, simple_loss=0.2494, pruned_loss=0.05071, over 7014.00 frames.], tot_loss[loss=0.1917, simple_loss=0.2761, pruned_loss=0.05365, over 1389300.31 frames.], batch size: 16, lr: 9.71e-04 +2022-05-27 17:32:13,822 INFO [train.py:823] (2/4) Epoch 17, batch 850, loss[loss=0.1821, simple_loss=0.2872, pruned_loss=0.03851, over 7409.00 frames.], tot_loss[loss=0.1903, simple_loss=0.2751, pruned_loss=0.05274, over 1395806.71 frames.], batch size: 22, lr: 9.69e-04 +2022-05-27 17:32:52,894 INFO [train.py:823] (2/4) Epoch 17, batch 900, loss[loss=0.1835, simple_loss=0.2587, pruned_loss=0.05412, over 7297.00 frames.], tot_loss[loss=0.1903, simple_loss=0.2751, pruned_loss=0.05274, over 1401993.16 frames.], batch size: 17, lr: 9.68e-04 +2022-05-27 17:33:32,129 INFO [train.py:823] (2/4) Epoch 17, batch 950, loss[loss=0.207, simple_loss=0.2913, pruned_loss=0.0613, over 4749.00 frames.], tot_loss[loss=0.1918, simple_loss=0.2763, pruned_loss=0.05365, over 1396464.97 frames.], batch size: 46, lr: 9.67e-04 +2022-05-27 17:33:44,919 INFO [train.py:823] (2/4) Epoch 18, batch 0, loss[loss=0.1937, simple_loss=0.2888, pruned_loss=0.0493, over 7366.00 frames.], tot_loss[loss=0.1937, simple_loss=0.2888, pruned_loss=0.0493, over 7366.00 frames.], batch size: 21, lr: 9.41e-04 +2022-05-27 17:34:24,286 INFO [train.py:823] (2/4) Epoch 18, batch 50, 
loss[loss=0.1858, simple_loss=0.2725, pruned_loss=0.04958, over 7319.00 frames.], tot_loss[loss=0.1907, simple_loss=0.2759, pruned_loss=0.05277, over 322224.29 frames.], batch size: 23, lr: 9.40e-04 +2022-05-27 17:35:03,321 INFO [train.py:823] (2/4) Epoch 18, batch 100, loss[loss=0.1667, simple_loss=0.252, pruned_loss=0.04075, over 7280.00 frames.], tot_loss[loss=0.1907, simple_loss=0.2759, pruned_loss=0.05278, over 564217.13 frames.], batch size: 20, lr: 9.39e-04 +2022-05-27 17:35:42,664 INFO [train.py:823] (2/4) Epoch 18, batch 150, loss[loss=0.2012, simple_loss=0.2885, pruned_loss=0.05697, over 7200.00 frames.], tot_loss[loss=0.1888, simple_loss=0.2735, pruned_loss=0.052, over 756982.47 frames.], batch size: 20, lr: 9.37e-04 +2022-05-27 17:36:21,807 INFO [train.py:823] (2/4) Epoch 18, batch 200, loss[loss=0.1914, simple_loss=0.2844, pruned_loss=0.04922, over 7279.00 frames.], tot_loss[loss=0.1881, simple_loss=0.2735, pruned_loss=0.05136, over 907630.87 frames.], batch size: 21, lr: 9.36e-04 +2022-05-27 17:37:01,153 INFO [train.py:823] (2/4) Epoch 18, batch 250, loss[loss=0.2008, simple_loss=0.2873, pruned_loss=0.05717, over 7302.00 frames.], tot_loss[loss=0.1907, simple_loss=0.2759, pruned_loss=0.05277, over 1015857.20 frames.], batch size: 22, lr: 9.35e-04 +2022-05-27 17:37:40,124 INFO [train.py:823] (2/4) Epoch 18, batch 300, loss[loss=0.1514, simple_loss=0.2385, pruned_loss=0.03214, over 7035.00 frames.], tot_loss[loss=0.1891, simple_loss=0.2744, pruned_loss=0.0519, over 1105644.21 frames.], batch size: 17, lr: 9.33e-04 +2022-05-27 17:38:19,204 INFO [train.py:823] (2/4) Epoch 18, batch 350, loss[loss=0.1942, simple_loss=0.2862, pruned_loss=0.05112, over 7281.00 frames.], tot_loss[loss=0.1886, simple_loss=0.2743, pruned_loss=0.05146, over 1176032.62 frames.], batch size: 20, lr: 9.32e-04 +2022-05-27 17:38:58,354 INFO [train.py:823] (2/4) Epoch 18, batch 400, loss[loss=0.1839, simple_loss=0.2629, pruned_loss=0.05246, over 7379.00 frames.], tot_loss[loss=0.1894, simple_loss=0.2754, pruned_loss=0.0517, over 1227446.55 frames.], batch size: 19, lr: 9.31e-04 +2022-05-27 17:39:39,247 INFO [train.py:823] (2/4) Epoch 18, batch 450, loss[loss=0.1904, simple_loss=0.2813, pruned_loss=0.04974, over 7164.00 frames.], tot_loss[loss=0.1887, simple_loss=0.2752, pruned_loss=0.05112, over 1269236.22 frames.], batch size: 23, lr: 9.29e-04 +2022-05-27 17:40:18,346 INFO [train.py:823] (2/4) Epoch 18, batch 500, loss[loss=0.1972, simple_loss=0.2897, pruned_loss=0.05231, over 7411.00 frames.], tot_loss[loss=0.1878, simple_loss=0.2744, pruned_loss=0.05059, over 1307195.20 frames.], batch size: 22, lr: 9.28e-04 +2022-05-27 17:40:59,022 INFO [train.py:823] (2/4) Epoch 18, batch 550, loss[loss=0.1723, simple_loss=0.2609, pruned_loss=0.04184, over 7339.00 frames.], tot_loss[loss=0.1863, simple_loss=0.273, pruned_loss=0.04977, over 1335424.34 frames.], batch size: 23, lr: 9.27e-04 +2022-05-27 17:41:37,898 INFO [train.py:823] (2/4) Epoch 18, batch 600, loss[loss=0.1871, simple_loss=0.2558, pruned_loss=0.05922, over 7303.00 frames.], tot_loss[loss=0.1863, simple_loss=0.2727, pruned_loss=0.04996, over 1357547.13 frames.], batch size: 19, lr: 9.26e-04 +2022-05-27 17:42:17,278 INFO [train.py:823] (2/4) Epoch 18, batch 650, loss[loss=0.1528, simple_loss=0.2403, pruned_loss=0.03267, over 7098.00 frames.], tot_loss[loss=0.1861, simple_loss=0.2724, pruned_loss=0.04989, over 1371122.21 frames.], batch size: 19, lr: 9.24e-04 +2022-05-27 17:42:57,800 INFO [train.py:823] (2/4) Epoch 18, batch 700, loss[loss=0.1797, 
simple_loss=0.2653, pruned_loss=0.04706, over 7199.00 frames.], tot_loss[loss=0.187, simple_loss=0.2728, pruned_loss=0.05059, over 1377187.61 frames.], batch size: 19, lr: 9.23e-04 +2022-05-27 17:43:37,169 INFO [train.py:823] (2/4) Epoch 18, batch 750, loss[loss=0.1745, simple_loss=0.2531, pruned_loss=0.04798, over 7108.00 frames.], tot_loss[loss=0.1868, simple_loss=0.2724, pruned_loss=0.05061, over 1388955.22 frames.], batch size: 18, lr: 9.22e-04 +2022-05-27 17:44:16,190 INFO [train.py:823] (2/4) Epoch 18, batch 800, loss[loss=0.1815, simple_loss=0.2818, pruned_loss=0.0406, over 7186.00 frames.], tot_loss[loss=0.1862, simple_loss=0.2721, pruned_loss=0.05015, over 1392005.60 frames.], batch size: 20, lr: 9.21e-04 +2022-05-27 17:44:55,804 INFO [train.py:823] (2/4) Epoch 18, batch 850, loss[loss=0.198, simple_loss=0.2862, pruned_loss=0.05487, over 7184.00 frames.], tot_loss[loss=0.1857, simple_loss=0.2716, pruned_loss=0.0499, over 1395431.90 frames.], batch size: 21, lr: 9.19e-04 +2022-05-27 17:45:34,721 INFO [train.py:823] (2/4) Epoch 18, batch 900, loss[loss=0.1624, simple_loss=0.2389, pruned_loss=0.04295, over 7140.00 frames.], tot_loss[loss=0.1861, simple_loss=0.272, pruned_loss=0.05008, over 1402148.87 frames.], batch size: 17, lr: 9.18e-04 +2022-05-27 17:46:13,717 INFO [train.py:823] (2/4) Epoch 18, batch 950, loss[loss=0.1886, simple_loss=0.2824, pruned_loss=0.04737, over 4725.00 frames.], tot_loss[loss=0.1869, simple_loss=0.2726, pruned_loss=0.05064, over 1373957.03 frames.], batch size: 46, lr: 9.17e-04 +2022-05-27 17:46:27,017 INFO [train.py:823] (2/4) Epoch 19, batch 0, loss[loss=0.2075, simple_loss=0.2964, pruned_loss=0.05933, over 7032.00 frames.], tot_loss[loss=0.2075, simple_loss=0.2964, pruned_loss=0.05933, over 7032.00 frames.], batch size: 26, lr: 8.94e-04 +2022-05-27 17:47:05,771 INFO [train.py:823] (2/4) Epoch 19, batch 50, loss[loss=0.1786, simple_loss=0.2558, pruned_loss=0.05068, over 7184.00 frames.], tot_loss[loss=0.1842, simple_loss=0.2687, pruned_loss=0.04987, over 325498.76 frames.], batch size: 19, lr: 8.92e-04 +2022-05-27 17:47:44,976 INFO [train.py:823] (2/4) Epoch 19, batch 100, loss[loss=0.2186, simple_loss=0.3069, pruned_loss=0.06518, over 6371.00 frames.], tot_loss[loss=0.1833, simple_loss=0.2683, pruned_loss=0.04918, over 566085.67 frames.], batch size: 34, lr: 8.91e-04 +2022-05-27 17:48:24,286 INFO [train.py:823] (2/4) Epoch 19, batch 150, loss[loss=0.1897, simple_loss=0.2706, pruned_loss=0.05446, over 7086.00 frames.], tot_loss[loss=0.1813, simple_loss=0.2672, pruned_loss=0.04773, over 759139.51 frames.], batch size: 18, lr: 8.90e-04 +2022-05-27 17:49:03,239 INFO [train.py:823] (2/4) Epoch 19, batch 200, loss[loss=0.2114, simple_loss=0.3011, pruned_loss=0.06084, over 7157.00 frames.], tot_loss[loss=0.1829, simple_loss=0.2687, pruned_loss=0.04856, over 902295.69 frames.], batch size: 22, lr: 8.89e-04 +2022-05-27 17:49:41,881 INFO [train.py:823] (2/4) Epoch 19, batch 250, loss[loss=0.1555, simple_loss=0.2396, pruned_loss=0.03575, over 7094.00 frames.], tot_loss[loss=0.1836, simple_loss=0.2701, pruned_loss=0.04852, over 1019054.56 frames.], batch size: 19, lr: 8.88e-04 +2022-05-27 17:50:22,342 INFO [train.py:823] (2/4) Epoch 19, batch 300, loss[loss=0.1568, simple_loss=0.2321, pruned_loss=0.04075, over 7011.00 frames.], tot_loss[loss=0.186, simple_loss=0.2714, pruned_loss=0.05025, over 1111105.51 frames.], batch size: 16, lr: 8.87e-04 +2022-05-27 17:51:01,457 INFO [train.py:823] (2/4) Epoch 19, batch 350, loss[loss=0.2543, simple_loss=0.3275, 
pruned_loss=0.09053, over 7320.00 frames.], tot_loss[loss=0.1866, simple_loss=0.2724, pruned_loss=0.05039, over 1178466.06 frames.], batch size: 18, lr: 8.85e-04 +2022-05-27 17:51:40,822 INFO [train.py:823] (2/4) Epoch 19, batch 400, loss[loss=0.1646, simple_loss=0.2401, pruned_loss=0.04454, over 7001.00 frames.], tot_loss[loss=0.1862, simple_loss=0.2721, pruned_loss=0.05013, over 1236206.86 frames.], batch size: 16, lr: 8.84e-04 +2022-05-27 17:52:20,019 INFO [train.py:823] (2/4) Epoch 19, batch 450, loss[loss=0.1961, simple_loss=0.2884, pruned_loss=0.05189, over 7150.00 frames.], tot_loss[loss=0.1858, simple_loss=0.2721, pruned_loss=0.04976, over 1278898.29 frames.], batch size: 23, lr: 8.83e-04 +2022-05-27 17:52:59,890 INFO [train.py:823] (2/4) Epoch 19, batch 500, loss[loss=0.1861, simple_loss=0.2835, pruned_loss=0.04432, over 6465.00 frames.], tot_loss[loss=0.1849, simple_loss=0.2714, pruned_loss=0.04925, over 1312390.03 frames.], batch size: 34, lr: 8.82e-04 +2022-05-27 17:53:39,218 INFO [train.py:823] (2/4) Epoch 19, batch 550, loss[loss=0.1706, simple_loss=0.2578, pruned_loss=0.04172, over 7422.00 frames.], tot_loss[loss=0.1849, simple_loss=0.2715, pruned_loss=0.04922, over 1335245.73 frames.], batch size: 18, lr: 8.81e-04 +2022-05-27 17:54:18,347 INFO [train.py:823] (2/4) Epoch 19, batch 600, loss[loss=0.1995, simple_loss=0.2732, pruned_loss=0.06286, over 7097.00 frames.], tot_loss[loss=0.1857, simple_loss=0.2723, pruned_loss=0.04959, over 1354306.13 frames.], batch size: 19, lr: 8.80e-04 +2022-05-27 17:54:57,224 INFO [train.py:823] (2/4) Epoch 19, batch 650, loss[loss=0.1621, simple_loss=0.2473, pruned_loss=0.03844, over 7424.00 frames.], tot_loss[loss=0.1852, simple_loss=0.2718, pruned_loss=0.04927, over 1367830.08 frames.], batch size: 18, lr: 8.78e-04 +2022-05-27 17:55:36,446 INFO [train.py:823] (2/4) Epoch 19, batch 700, loss[loss=0.19, simple_loss=0.2805, pruned_loss=0.04977, over 7064.00 frames.], tot_loss[loss=0.1843, simple_loss=0.2709, pruned_loss=0.0488, over 1378997.05 frames.], batch size: 26, lr: 8.77e-04 +2022-05-27 17:56:14,898 INFO [train.py:823] (2/4) Epoch 19, batch 750, loss[loss=0.1788, simple_loss=0.2674, pruned_loss=0.04509, over 7381.00 frames.], tot_loss[loss=0.1847, simple_loss=0.2711, pruned_loss=0.0492, over 1389289.23 frames.], batch size: 21, lr: 8.76e-04 +2022-05-27 17:56:54,498 INFO [train.py:823] (2/4) Epoch 19, batch 800, loss[loss=0.1936, simple_loss=0.2821, pruned_loss=0.05257, over 7319.00 frames.], tot_loss[loss=0.185, simple_loss=0.2716, pruned_loss=0.04927, over 1398456.13 frames.], batch size: 22, lr: 8.75e-04 +2022-05-27 17:57:33,441 INFO [train.py:823] (2/4) Epoch 19, batch 850, loss[loss=0.1749, simple_loss=0.2701, pruned_loss=0.03985, over 7375.00 frames.], tot_loss[loss=0.1848, simple_loss=0.2716, pruned_loss=0.04902, over 1403197.16 frames.], batch size: 21, lr: 8.74e-04 +2022-05-27 17:58:12,622 INFO [train.py:823] (2/4) Epoch 19, batch 900, loss[loss=0.1801, simple_loss=0.2721, pruned_loss=0.04406, over 6967.00 frames.], tot_loss[loss=0.186, simple_loss=0.2725, pruned_loss=0.04978, over 1395032.89 frames.], batch size: 26, lr: 8.73e-04 +2022-05-27 17:59:02,812 INFO [train.py:823] (2/4) Epoch 20, batch 0, loss[loss=0.1789, simple_loss=0.2816, pruned_loss=0.03806, over 6521.00 frames.], tot_loss[loss=0.1789, simple_loss=0.2816, pruned_loss=0.03806, over 6521.00 frames.], batch size: 34, lr: 8.51e-04 +2022-05-27 17:59:42,310 INFO [train.py:823] (2/4) Epoch 20, batch 50, loss[loss=0.1532, simple_loss=0.2341, pruned_loss=0.03619, over 
7305.00 frames.], tot_loss[loss=0.1788, simple_loss=0.2674, pruned_loss=0.04507, over 321703.22 frames.], batch size: 18, lr: 8.49e-04 +2022-05-27 18:00:21,137 INFO [train.py:823] (2/4) Epoch 20, batch 100, loss[loss=0.1958, simple_loss=0.2834, pruned_loss=0.05408, over 4915.00 frames.], tot_loss[loss=0.1803, simple_loss=0.268, pruned_loss=0.04632, over 561817.25 frames.], batch size: 48, lr: 8.48e-04 +2022-05-27 18:01:00,442 INFO [train.py:823] (2/4) Epoch 20, batch 150, loss[loss=0.1493, simple_loss=0.2327, pruned_loss=0.03302, over 7293.00 frames.], tot_loss[loss=0.1821, simple_loss=0.2692, pruned_loss=0.04749, over 750736.87 frames.], batch size: 17, lr: 8.47e-04 +2022-05-27 18:01:41,380 INFO [train.py:823] (2/4) Epoch 20, batch 200, loss[loss=0.1697, simple_loss=0.2467, pruned_loss=0.04633, over 7019.00 frames.], tot_loss[loss=0.183, simple_loss=0.2698, pruned_loss=0.0481, over 902253.19 frames.], batch size: 16, lr: 8.46e-04 +2022-05-27 18:02:20,254 INFO [train.py:823] (2/4) Epoch 20, batch 250, loss[loss=0.1756, simple_loss=0.2565, pruned_loss=0.04737, over 7308.00 frames.], tot_loss[loss=0.1814, simple_loss=0.2682, pruned_loss=0.04735, over 1016895.13 frames.], batch size: 18, lr: 8.45e-04 +2022-05-27 18:02:59,638 INFO [train.py:823] (2/4) Epoch 20, batch 300, loss[loss=0.1807, simple_loss=0.2678, pruned_loss=0.04682, over 7311.00 frames.], tot_loss[loss=0.1815, simple_loss=0.2687, pruned_loss=0.04719, over 1106603.27 frames.], batch size: 22, lr: 8.44e-04 +2022-05-27 18:03:38,779 INFO [train.py:823] (2/4) Epoch 20, batch 350, loss[loss=0.1847, simple_loss=0.275, pruned_loss=0.04718, over 7195.00 frames.], tot_loss[loss=0.1816, simple_loss=0.2691, pruned_loss=0.04701, over 1176208.37 frames.], batch size: 20, lr: 8.43e-04 +2022-05-27 18:04:18,920 INFO [train.py:823] (2/4) Epoch 20, batch 400, loss[loss=0.1851, simple_loss=0.2749, pruned_loss=0.0477, over 7150.00 frames.], tot_loss[loss=0.1823, simple_loss=0.2697, pruned_loss=0.04742, over 1230923.61 frames.], batch size: 23, lr: 8.42e-04 +2022-05-27 18:04:57,921 INFO [train.py:823] (2/4) Epoch 20, batch 450, loss[loss=0.1721, simple_loss=0.2424, pruned_loss=0.05092, over 7164.00 frames.], tot_loss[loss=0.1828, simple_loss=0.2703, pruned_loss=0.04769, over 1268809.60 frames.], batch size: 17, lr: 8.41e-04 +2022-05-27 18:05:38,488 INFO [train.py:823] (2/4) Epoch 20, batch 500, loss[loss=0.1899, simple_loss=0.2735, pruned_loss=0.05312, over 7016.00 frames.], tot_loss[loss=0.1833, simple_loss=0.2709, pruned_loss=0.04784, over 1304286.68 frames.], batch size: 17, lr: 8.40e-04 +2022-05-27 18:06:18,327 INFO [train.py:823] (2/4) Epoch 20, batch 550, loss[loss=0.1909, simple_loss=0.2869, pruned_loss=0.04742, over 7139.00 frames.], tot_loss[loss=0.1835, simple_loss=0.2702, pruned_loss=0.04843, over 1332727.88 frames.], batch size: 23, lr: 8.39e-04 +2022-05-27 18:06:57,329 INFO [train.py:823] (2/4) Epoch 20, batch 600, loss[loss=0.229, simple_loss=0.3012, pruned_loss=0.07839, over 7110.00 frames.], tot_loss[loss=0.1842, simple_loss=0.2708, pruned_loss=0.04887, over 1347978.24 frames.], batch size: 18, lr: 8.38e-04 +2022-05-27 18:07:37,168 INFO [train.py:823] (2/4) Epoch 20, batch 650, loss[loss=0.2055, simple_loss=0.3041, pruned_loss=0.05351, over 7003.00 frames.], tot_loss[loss=0.1836, simple_loss=0.27, pruned_loss=0.04861, over 1365280.94 frames.], batch size: 29, lr: 8.37e-04 +2022-05-27 18:08:16,119 INFO [train.py:823] (2/4) Epoch 20, batch 700, loss[loss=0.1749, simple_loss=0.2646, pruned_loss=0.04256, over 7098.00 frames.], 
tot_loss[loss=0.183, simple_loss=0.2696, pruned_loss=0.0482, over 1379694.80 frames.], batch size: 18, lr: 8.36e-04 +2022-05-27 18:08:55,663 INFO [train.py:823] (2/4) Epoch 20, batch 750, loss[loss=0.1747, simple_loss=0.2714, pruned_loss=0.03899, over 7283.00 frames.], tot_loss[loss=0.1814, simple_loss=0.2682, pruned_loss=0.04737, over 1389019.14 frames.], batch size: 21, lr: 8.35e-04 +2022-05-27 18:09:34,540 INFO [train.py:823] (2/4) Epoch 20, batch 800, loss[loss=0.1444, simple_loss=0.2341, pruned_loss=0.02734, over 7421.00 frames.], tot_loss[loss=0.1816, simple_loss=0.269, pruned_loss=0.04705, over 1397374.09 frames.], batch size: 18, lr: 8.34e-04 +2022-05-27 18:10:13,669 INFO [train.py:823] (2/4) Epoch 20, batch 850, loss[loss=0.1723, simple_loss=0.2695, pruned_loss=0.03758, over 7067.00 frames.], tot_loss[loss=0.1808, simple_loss=0.2682, pruned_loss=0.04673, over 1401145.81 frames.], batch size: 26, lr: 8.33e-04 +2022-05-27 18:10:52,743 INFO [train.py:823] (2/4) Epoch 20, batch 900, loss[loss=0.1439, simple_loss=0.221, pruned_loss=0.03338, over 6739.00 frames.], tot_loss[loss=0.1814, simple_loss=0.2687, pruned_loss=0.0471, over 1398991.58 frames.], batch size: 15, lr: 8.31e-04 +2022-05-27 18:11:42,509 INFO [train.py:823] (2/4) Epoch 21, batch 0, loss[loss=0.1756, simple_loss=0.2582, pruned_loss=0.04652, over 7198.00 frames.], tot_loss[loss=0.1756, simple_loss=0.2582, pruned_loss=0.04652, over 7198.00 frames.], batch size: 18, lr: 8.11e-04 +2022-05-27 18:12:21,478 INFO [train.py:823] (2/4) Epoch 21, batch 50, loss[loss=0.2032, simple_loss=0.2889, pruned_loss=0.0588, over 7211.00 frames.], tot_loss[loss=0.1825, simple_loss=0.269, pruned_loss=0.04801, over 318253.31 frames.], batch size: 25, lr: 8.10e-04 +2022-05-27 18:13:00,543 INFO [train.py:823] (2/4) Epoch 21, batch 100, loss[loss=0.1891, simple_loss=0.2794, pruned_loss=0.04938, over 6457.00 frames.], tot_loss[loss=0.1787, simple_loss=0.2652, pruned_loss=0.04611, over 561620.87 frames.], batch size: 34, lr: 8.09e-04 +2022-05-27 18:13:40,057 INFO [train.py:823] (2/4) Epoch 21, batch 150, loss[loss=0.205, simple_loss=0.2785, pruned_loss=0.06569, over 7278.00 frames.], tot_loss[loss=0.1783, simple_loss=0.2646, pruned_loss=0.046, over 755228.02 frames.], batch size: 20, lr: 8.08e-04 +2022-05-27 18:14:19,925 INFO [train.py:823] (2/4) Epoch 21, batch 200, loss[loss=0.1454, simple_loss=0.2212, pruned_loss=0.03479, over 7314.00 frames.], tot_loss[loss=0.1791, simple_loss=0.2655, pruned_loss=0.04634, over 904320.01 frames.], batch size: 18, lr: 8.07e-04 +2022-05-27 18:14:59,168 INFO [train.py:823] (2/4) Epoch 21, batch 250, loss[loss=0.1985, simple_loss=0.2792, pruned_loss=0.05888, over 7284.00 frames.], tot_loss[loss=0.1805, simple_loss=0.267, pruned_loss=0.04706, over 1011675.67 frames.], batch size: 20, lr: 8.06e-04 +2022-05-27 18:15:37,873 INFO [train.py:823] (2/4) Epoch 21, batch 300, loss[loss=0.1655, simple_loss=0.2619, pruned_loss=0.03454, over 6465.00 frames.], tot_loss[loss=0.1812, simple_loss=0.268, pruned_loss=0.04722, over 1101161.62 frames.], batch size: 34, lr: 8.05e-04 +2022-05-27 18:16:17,448 INFO [train.py:823] (2/4) Epoch 21, batch 350, loss[loss=0.1904, simple_loss=0.2741, pruned_loss=0.05339, over 7427.00 frames.], tot_loss[loss=0.1812, simple_loss=0.2683, pruned_loss=0.04707, over 1171318.72 frames.], batch size: 22, lr: 8.04e-04 +2022-05-27 18:16:56,520 INFO [train.py:823] (2/4) Epoch 21, batch 400, loss[loss=0.1562, simple_loss=0.2384, pruned_loss=0.03706, over 7283.00 frames.], tot_loss[loss=0.1805, 
simple_loss=0.2679, pruned_loss=0.04654, over 1225941.17 frames.], batch size: 17, lr: 8.03e-04 +2022-05-27 18:17:36,047 INFO [train.py:823] (2/4) Epoch 21, batch 450, loss[loss=0.1845, simple_loss=0.2769, pruned_loss=0.04605, over 7190.00 frames.], tot_loss[loss=0.1806, simple_loss=0.2675, pruned_loss=0.04685, over 1270024.51 frames.], batch size: 21, lr: 8.02e-04 +2022-05-27 18:18:15,379 INFO [train.py:823] (2/4) Epoch 21, batch 500, loss[loss=0.2004, simple_loss=0.2786, pruned_loss=0.06104, over 7189.00 frames.], tot_loss[loss=0.18, simple_loss=0.267, pruned_loss=0.0465, over 1303395.62 frames.], batch size: 18, lr: 8.01e-04 +2022-05-27 18:18:54,658 INFO [train.py:823] (2/4) Epoch 21, batch 550, loss[loss=0.1898, simple_loss=0.2827, pruned_loss=0.0485, over 7367.00 frames.], tot_loss[loss=0.1784, simple_loss=0.2657, pruned_loss=0.04548, over 1334899.12 frames.], batch size: 21, lr: 8.00e-04 +2022-05-27 18:19:33,838 INFO [train.py:823] (2/4) Epoch 21, batch 600, loss[loss=0.1847, simple_loss=0.2813, pruned_loss=0.04407, over 6433.00 frames.], tot_loss[loss=0.1802, simple_loss=0.2675, pruned_loss=0.04646, over 1353011.94 frames.], batch size: 34, lr: 8.00e-04 +2022-05-27 18:20:13,185 INFO [train.py:823] (2/4) Epoch 21, batch 650, loss[loss=0.1694, simple_loss=0.2595, pruned_loss=0.03969, over 7291.00 frames.], tot_loss[loss=0.1803, simple_loss=0.2678, pruned_loss=0.04642, over 1369277.73 frames.], batch size: 22, lr: 7.99e-04 +2022-05-27 18:20:52,549 INFO [train.py:823] (2/4) Epoch 21, batch 700, loss[loss=0.1851, simple_loss=0.2724, pruned_loss=0.04885, over 7197.00 frames.], tot_loss[loss=0.1807, simple_loss=0.2681, pruned_loss=0.04668, over 1379958.74 frames.], batch size: 20, lr: 7.98e-04 +2022-05-27 18:21:31,567 INFO [train.py:823] (2/4) Epoch 21, batch 750, loss[loss=0.19, simple_loss=0.2902, pruned_loss=0.04485, over 7199.00 frames.], tot_loss[loss=0.1792, simple_loss=0.2665, pruned_loss=0.04598, over 1377666.62 frames.], batch size: 25, lr: 7.97e-04 +2022-05-27 18:22:10,773 INFO [train.py:823] (2/4) Epoch 21, batch 800, loss[loss=0.1967, simple_loss=0.2783, pruned_loss=0.05755, over 7333.00 frames.], tot_loss[loss=0.1801, simple_loss=0.2674, pruned_loss=0.04642, over 1383349.38 frames.], batch size: 23, lr: 7.96e-04 +2022-05-27 18:22:50,133 INFO [train.py:823] (2/4) Epoch 21, batch 850, loss[loss=0.1776, simple_loss=0.2717, pruned_loss=0.04179, over 7195.00 frames.], tot_loss[loss=0.1796, simple_loss=0.267, pruned_loss=0.04611, over 1388115.85 frames.], batch size: 20, lr: 7.95e-04 +2022-05-27 18:23:29,143 INFO [train.py:823] (2/4) Epoch 21, batch 900, loss[loss=0.1942, simple_loss=0.275, pruned_loss=0.05665, over 7375.00 frames.], tot_loss[loss=0.1793, simple_loss=0.2669, pruned_loss=0.04584, over 1387285.08 frames.], batch size: 20, lr: 7.94e-04 +2022-05-27 18:24:19,599 INFO [train.py:823] (2/4) Epoch 22, batch 0, loss[loss=0.1696, simple_loss=0.2624, pruned_loss=0.03839, over 7383.00 frames.], tot_loss[loss=0.1696, simple_loss=0.2624, pruned_loss=0.03839, over 7383.00 frames.], batch size: 21, lr: 7.75e-04 +2022-05-27 18:25:00,070 INFO [train.py:823] (2/4) Epoch 22, batch 50, loss[loss=0.2187, simple_loss=0.3059, pruned_loss=0.0657, over 7175.00 frames.], tot_loss[loss=0.1756, simple_loss=0.2618, pruned_loss=0.04465, over 321529.77 frames.], batch size: 22, lr: 7.74e-04 +2022-05-27 18:25:39,937 INFO [train.py:823] (2/4) Epoch 22, batch 100, loss[loss=0.1954, simple_loss=0.2899, pruned_loss=0.05042, over 7102.00 frames.], tot_loss[loss=0.1766, simple_loss=0.2639, 
pruned_loss=0.04469, over 567431.07 frames.], batch size: 20, lr: 7.73e-04 +2022-05-27 18:26:18,868 INFO [train.py:823] (2/4) Epoch 22, batch 150, loss[loss=0.1962, simple_loss=0.2695, pruned_loss=0.06145, over 4798.00 frames.], tot_loss[loss=0.1795, simple_loss=0.2665, pruned_loss=0.04628, over 753995.38 frames.], batch size: 46, lr: 7.73e-04 +2022-05-27 18:26:59,553 INFO [train.py:823] (2/4) Epoch 22, batch 200, loss[loss=0.1807, simple_loss=0.2828, pruned_loss=0.03936, over 7107.00 frames.], tot_loss[loss=0.1774, simple_loss=0.2652, pruned_loss=0.0448, over 899182.54 frames.], batch size: 20, lr: 7.72e-04 +2022-05-27 18:27:38,626 INFO [train.py:823] (2/4) Epoch 22, batch 250, loss[loss=0.1745, simple_loss=0.2482, pruned_loss=0.05035, over 7089.00 frames.], tot_loss[loss=0.1773, simple_loss=0.265, pruned_loss=0.0448, over 1016989.90 frames.], batch size: 18, lr: 7.71e-04 +2022-05-27 18:28:18,137 INFO [train.py:823] (2/4) Epoch 22, batch 300, loss[loss=0.1913, simple_loss=0.2711, pruned_loss=0.05578, over 7182.00 frames.], tot_loss[loss=0.1775, simple_loss=0.2652, pruned_loss=0.04495, over 1103050.86 frames.], batch size: 18, lr: 7.70e-04 +2022-05-27 18:28:58,689 INFO [train.py:823] (2/4) Epoch 22, batch 350, loss[loss=0.1696, simple_loss=0.2655, pruned_loss=0.03682, over 6946.00 frames.], tot_loss[loss=0.179, simple_loss=0.2661, pruned_loss=0.04592, over 1175054.00 frames.], batch size: 29, lr: 7.69e-04 +2022-05-27 18:29:37,848 INFO [train.py:823] (2/4) Epoch 22, batch 400, loss[loss=0.224, simple_loss=0.3147, pruned_loss=0.06659, over 7185.00 frames.], tot_loss[loss=0.1785, simple_loss=0.2657, pruned_loss=0.04567, over 1231049.13 frames.], batch size: 21, lr: 7.68e-04 +2022-05-27 18:30:17,528 INFO [train.py:823] (2/4) Epoch 22, batch 450, loss[loss=0.166, simple_loss=0.2435, pruned_loss=0.04429, over 7209.00 frames.], tot_loss[loss=0.1779, simple_loss=0.2651, pruned_loss=0.04535, over 1277095.01 frames.], batch size: 16, lr: 7.67e-04 +2022-05-27 18:30:56,724 INFO [train.py:823] (2/4) Epoch 22, batch 500, loss[loss=0.1738, simple_loss=0.262, pruned_loss=0.04279, over 6662.00 frames.], tot_loss[loss=0.1777, simple_loss=0.265, pruned_loss=0.04518, over 1303554.27 frames.], batch size: 34, lr: 7.66e-04 +2022-05-27 18:31:36,001 INFO [train.py:823] (2/4) Epoch 22, batch 550, loss[loss=0.1962, simple_loss=0.2931, pruned_loss=0.04968, over 6926.00 frames.], tot_loss[loss=0.1765, simple_loss=0.264, pruned_loss=0.04445, over 1330271.59 frames.], batch size: 29, lr: 7.65e-04 +2022-05-27 18:32:15,242 INFO [train.py:823] (2/4) Epoch 22, batch 600, loss[loss=0.2053, simple_loss=0.27, pruned_loss=0.07032, over 7006.00 frames.], tot_loss[loss=0.1771, simple_loss=0.2644, pruned_loss=0.04489, over 1349062.84 frames.], batch size: 17, lr: 7.65e-04 +2022-05-27 18:32:54,282 INFO [train.py:823] (2/4) Epoch 22, batch 650, loss[loss=0.1816, simple_loss=0.2712, pruned_loss=0.04594, over 7108.00 frames.], tot_loss[loss=0.1772, simple_loss=0.2645, pruned_loss=0.04491, over 1358461.34 frames.], batch size: 20, lr: 7.64e-04 +2022-05-27 18:33:33,458 INFO [train.py:823] (2/4) Epoch 22, batch 700, loss[loss=0.1793, simple_loss=0.2756, pruned_loss=0.04149, over 7104.00 frames.], tot_loss[loss=0.1775, simple_loss=0.2651, pruned_loss=0.045, over 1371147.83 frames.], batch size: 19, lr: 7.63e-04 +2022-05-27 18:34:12,391 INFO [train.py:823] (2/4) Epoch 22, batch 750, loss[loss=0.1482, simple_loss=0.2354, pruned_loss=0.0305, over 7017.00 frames.], tot_loss[loss=0.1776, simple_loss=0.265, pruned_loss=0.04513, over 1380876.96 
frames.], batch size: 16, lr: 7.62e-04 +2022-05-27 18:34:51,821 INFO [train.py:823] (2/4) Epoch 22, batch 800, loss[loss=0.1674, simple_loss=0.2604, pruned_loss=0.03718, over 7373.00 frames.], tot_loss[loss=0.177, simple_loss=0.2646, pruned_loss=0.04473, over 1390801.03 frames.], batch size: 20, lr: 7.61e-04 +2022-05-27 18:35:31,019 INFO [train.py:823] (2/4) Epoch 22, batch 850, loss[loss=0.1719, simple_loss=0.2687, pruned_loss=0.03755, over 6417.00 frames.], tot_loss[loss=0.1776, simple_loss=0.265, pruned_loss=0.04515, over 1399257.20 frames.], batch size: 34, lr: 7.60e-04 +2022-05-27 18:36:10,329 INFO [train.py:823] (2/4) Epoch 22, batch 900, loss[loss=0.2038, simple_loss=0.2976, pruned_loss=0.05501, over 7149.00 frames.], tot_loss[loss=0.1781, simple_loss=0.2657, pruned_loss=0.04527, over 1403749.54 frames.], batch size: 23, lr: 7.59e-04 +2022-05-27 18:37:01,219 INFO [train.py:823] (2/4) Epoch 23, batch 0, loss[loss=0.1462, simple_loss=0.2288, pruned_loss=0.03178, over 6798.00 frames.], tot_loss[loss=0.1462, simple_loss=0.2288, pruned_loss=0.03178, over 6798.00 frames.], batch size: 15, lr: 7.42e-04 +2022-05-27 18:37:41,646 INFO [train.py:823] (2/4) Epoch 23, batch 50, loss[loss=0.1581, simple_loss=0.2569, pruned_loss=0.02961, over 7375.00 frames.], tot_loss[loss=0.1729, simple_loss=0.2609, pruned_loss=0.04244, over 320400.55 frames.], batch size: 21, lr: 7.41e-04 +2022-05-27 18:38:20,902 INFO [train.py:823] (2/4) Epoch 23, batch 100, loss[loss=0.1708, simple_loss=0.2584, pruned_loss=0.0416, over 7379.00 frames.], tot_loss[loss=0.1756, simple_loss=0.2636, pruned_loss=0.04384, over 562691.38 frames.], batch size: 20, lr: 7.41e-04 +2022-05-27 18:39:00,186 INFO [train.py:823] (2/4) Epoch 23, batch 150, loss[loss=0.1501, simple_loss=0.2283, pruned_loss=0.03597, over 7289.00 frames.], tot_loss[loss=0.1763, simple_loss=0.2643, pruned_loss=0.04411, over 752503.41 frames.], batch size: 18, lr: 7.40e-04 +2022-05-27 18:39:39,876 INFO [train.py:823] (2/4) Epoch 23, batch 200, loss[loss=0.2028, simple_loss=0.2893, pruned_loss=0.0581, over 4960.00 frames.], tot_loss[loss=0.1757, simple_loss=0.264, pruned_loss=0.04367, over 899592.05 frames.], batch size: 47, lr: 7.39e-04 +2022-05-27 18:40:19,191 INFO [train.py:823] (2/4) Epoch 23, batch 250, loss[loss=0.1743, simple_loss=0.2474, pruned_loss=0.0506, over 7098.00 frames.], tot_loss[loss=0.1766, simple_loss=0.2647, pruned_loss=0.04423, over 1019271.30 frames.], batch size: 18, lr: 7.38e-04 +2022-05-27 18:40:58,269 INFO [train.py:823] (2/4) Epoch 23, batch 300, loss[loss=0.1616, simple_loss=0.2602, pruned_loss=0.03149, over 7299.00 frames.], tot_loss[loss=0.1772, simple_loss=0.2657, pruned_loss=0.04437, over 1112204.45 frames.], batch size: 22, lr: 7.37e-04 +2022-05-27 18:41:37,503 INFO [train.py:823] (2/4) Epoch 23, batch 350, loss[loss=0.1773, simple_loss=0.2651, pruned_loss=0.04476, over 7279.00 frames.], tot_loss[loss=0.1773, simple_loss=0.2653, pruned_loss=0.04463, over 1183018.63 frames.], batch size: 20, lr: 7.36e-04 +2022-05-27 18:42:16,398 INFO [train.py:823] (2/4) Epoch 23, batch 400, loss[loss=0.1335, simple_loss=0.2243, pruned_loss=0.02137, over 7303.00 frames.], tot_loss[loss=0.176, simple_loss=0.2639, pruned_loss=0.04411, over 1235253.10 frames.], batch size: 17, lr: 7.36e-04 +2022-05-27 18:42:55,441 INFO [train.py:823] (2/4) Epoch 23, batch 450, loss[loss=0.1889, simple_loss=0.2666, pruned_loss=0.05564, over 4900.00 frames.], tot_loss[loss=0.1758, simple_loss=0.2636, pruned_loss=0.04403, over 1273782.47 frames.], batch size: 47, lr: 
7.35e-04 +2022-05-27 18:43:34,533 INFO [train.py:823] (2/4) Epoch 23, batch 500, loss[loss=0.1739, simple_loss=0.2713, pruned_loss=0.03823, over 6654.00 frames.], tot_loss[loss=0.1759, simple_loss=0.2642, pruned_loss=0.04381, over 1302548.31 frames.], batch size: 34, lr: 7.34e-04 +2022-05-27 18:44:13,797 INFO [train.py:823] (2/4) Epoch 23, batch 550, loss[loss=0.211, simple_loss=0.3044, pruned_loss=0.05879, over 7231.00 frames.], tot_loss[loss=0.1762, simple_loss=0.2649, pruned_loss=0.04376, over 1333553.45 frames.], batch size: 24, lr: 7.33e-04 +2022-05-27 18:44:52,836 INFO [train.py:823] (2/4) Epoch 23, batch 600, loss[loss=0.1821, simple_loss=0.2688, pruned_loss=0.04767, over 4936.00 frames.], tot_loss[loss=0.1768, simple_loss=0.2648, pruned_loss=0.04443, over 1349643.48 frames.], batch size: 46, lr: 7.32e-04 +2022-05-27 18:45:32,303 INFO [train.py:823] (2/4) Epoch 23, batch 650, loss[loss=0.1417, simple_loss=0.2278, pruned_loss=0.02776, over 7099.00 frames.], tot_loss[loss=0.1765, simple_loss=0.2645, pruned_loss=0.04427, over 1364095.52 frames.], batch size: 19, lr: 7.32e-04 +2022-05-27 18:46:11,162 INFO [train.py:823] (2/4) Epoch 23, batch 700, loss[loss=0.1891, simple_loss=0.2543, pruned_loss=0.06195, over 7003.00 frames.], tot_loss[loss=0.1771, simple_loss=0.2645, pruned_loss=0.04489, over 1370864.20 frames.], batch size: 16, lr: 7.31e-04 +2022-05-27 18:46:50,490 INFO [train.py:823] (2/4) Epoch 23, batch 750, loss[loss=0.193, simple_loss=0.2744, pruned_loss=0.05573, over 4925.00 frames.], tot_loss[loss=0.1764, simple_loss=0.2642, pruned_loss=0.04437, over 1375867.84 frames.], batch size: 48, lr: 7.30e-04 +2022-05-27 18:47:30,857 INFO [train.py:823] (2/4) Epoch 23, batch 800, loss[loss=0.1666, simple_loss=0.2442, pruned_loss=0.0445, over 7187.00 frames.], tot_loss[loss=0.1759, simple_loss=0.2635, pruned_loss=0.04413, over 1387544.87 frames.], batch size: 18, lr: 7.29e-04 +2022-05-27 18:48:10,101 INFO [train.py:823] (2/4) Epoch 23, batch 850, loss[loss=0.1963, simple_loss=0.2811, pruned_loss=0.05575, over 7148.00 frames.], tot_loss[loss=0.176, simple_loss=0.2638, pruned_loss=0.04414, over 1395425.13 frames.], batch size: 23, lr: 7.28e-04 +2022-05-27 18:48:48,829 INFO [train.py:823] (2/4) Epoch 23, batch 900, loss[loss=0.1857, simple_loss=0.2664, pruned_loss=0.05248, over 7013.00 frames.], tot_loss[loss=0.1751, simple_loss=0.2628, pruned_loss=0.04375, over 1400723.56 frames.], batch size: 17, lr: 7.28e-04 +2022-05-27 18:49:41,164 INFO [train.py:823] (2/4) Epoch 24, batch 0, loss[loss=0.1483, simple_loss=0.2318, pruned_loss=0.03238, over 7313.00 frames.], tot_loss[loss=0.1483, simple_loss=0.2318, pruned_loss=0.03238, over 7313.00 frames.], batch size: 18, lr: 7.12e-04 +2022-05-27 18:50:19,975 INFO [train.py:823] (2/4) Epoch 24, batch 50, loss[loss=0.1734, simple_loss=0.257, pruned_loss=0.04489, over 7157.00 frames.], tot_loss[loss=0.1764, simple_loss=0.2652, pruned_loss=0.04378, over 319565.04 frames.], batch size: 17, lr: 7.11e-04 +2022-05-27 18:51:00,522 INFO [train.py:823] (2/4) Epoch 24, batch 100, loss[loss=0.1538, simple_loss=0.2472, pruned_loss=0.03017, over 6548.00 frames.], tot_loss[loss=0.1757, simple_loss=0.2641, pruned_loss=0.04369, over 560300.64 frames.], batch size: 34, lr: 7.10e-04 +2022-05-27 18:51:39,738 INFO [train.py:823] (2/4) Epoch 24, batch 150, loss[loss=0.1542, simple_loss=0.2493, pruned_loss=0.02959, over 6996.00 frames.], tot_loss[loss=0.1762, simple_loss=0.2646, pruned_loss=0.04388, over 750894.71 frames.], batch size: 29, lr: 7.10e-04 +2022-05-27 
18:52:18,838 INFO [train.py:823] (2/4) Epoch 24, batch 200, loss[loss=0.1678, simple_loss=0.2643, pruned_loss=0.03567, over 7280.00 frames.], tot_loss[loss=0.1759, simple_loss=0.2641, pruned_loss=0.04384, over 900466.69 frames.], batch size: 21, lr: 7.09e-04 +2022-05-27 18:52:58,095 INFO [train.py:823] (2/4) Epoch 24, batch 250, loss[loss=0.1458, simple_loss=0.2239, pruned_loss=0.03384, over 7312.00 frames.], tot_loss[loss=0.1746, simple_loss=0.2627, pruned_loss=0.04329, over 1016035.25 frames.], batch size: 17, lr: 7.08e-04 +2022-05-27 18:53:37,194 INFO [train.py:823] (2/4) Epoch 24, batch 300, loss[loss=0.1896, simple_loss=0.2859, pruned_loss=0.04662, over 7331.00 frames.], tot_loss[loss=0.1754, simple_loss=0.2633, pruned_loss=0.04378, over 1099920.42 frames.], batch size: 23, lr: 7.07e-04 +2022-05-27 18:54:16,006 INFO [train.py:823] (2/4) Epoch 24, batch 350, loss[loss=0.165, simple_loss=0.2479, pruned_loss=0.04108, over 7300.00 frames.], tot_loss[loss=0.175, simple_loss=0.2626, pruned_loss=0.04368, over 1175014.31 frames.], batch size: 17, lr: 7.07e-04 +2022-05-27 18:54:55,408 INFO [train.py:823] (2/4) Epoch 24, batch 400, loss[loss=0.2147, simple_loss=0.2987, pruned_loss=0.06532, over 7325.00 frames.], tot_loss[loss=0.1742, simple_loss=0.2616, pruned_loss=0.04344, over 1226960.18 frames.], batch size: 23, lr: 7.06e-04 +2022-05-27 18:55:34,361 INFO [train.py:823] (2/4) Epoch 24, batch 450, loss[loss=0.1391, simple_loss=0.2286, pruned_loss=0.0248, over 7192.00 frames.], tot_loss[loss=0.1743, simple_loss=0.2618, pruned_loss=0.04342, over 1268047.42 frames.], batch size: 18, lr: 7.05e-04 +2022-05-27 18:56:13,810 INFO [train.py:823] (2/4) Epoch 24, batch 500, loss[loss=0.1832, simple_loss=0.2774, pruned_loss=0.04445, over 7275.00 frames.], tot_loss[loss=0.1744, simple_loss=0.2623, pruned_loss=0.04322, over 1303730.98 frames.], batch size: 21, lr: 7.04e-04 +2022-05-27 18:56:52,934 INFO [train.py:823] (2/4) Epoch 24, batch 550, loss[loss=0.163, simple_loss=0.2607, pruned_loss=0.03265, over 6451.00 frames.], tot_loss[loss=0.1744, simple_loss=0.2626, pruned_loss=0.04306, over 1327863.35 frames.], batch size: 34, lr: 7.04e-04 +2022-05-27 18:57:32,010 INFO [train.py:823] (2/4) Epoch 24, batch 600, loss[loss=0.1873, simple_loss=0.282, pruned_loss=0.04629, over 7168.00 frames.], tot_loss[loss=0.1749, simple_loss=0.2632, pruned_loss=0.04329, over 1346853.40 frames.], batch size: 23, lr: 7.03e-04 +2022-05-27 18:58:10,819 INFO [train.py:823] (2/4) Epoch 24, batch 650, loss[loss=0.1738, simple_loss=0.2666, pruned_loss=0.04048, over 7099.00 frames.], tot_loss[loss=0.1747, simple_loss=0.2633, pruned_loss=0.04301, over 1359080.63 frames.], batch size: 19, lr: 7.02e-04 +2022-05-27 18:58:49,817 INFO [train.py:823] (2/4) Epoch 24, batch 700, loss[loss=0.1682, simple_loss=0.2689, pruned_loss=0.03376, over 7168.00 frames.], tot_loss[loss=0.1752, simple_loss=0.2636, pruned_loss=0.04338, over 1371980.17 frames.], batch size: 22, lr: 7.01e-04 +2022-05-27 18:59:29,045 INFO [train.py:823] (2/4) Epoch 24, batch 750, loss[loss=0.1659, simple_loss=0.2689, pruned_loss=0.0315, over 7108.00 frames.], tot_loss[loss=0.1744, simple_loss=0.2632, pruned_loss=0.04284, over 1384815.43 frames.], batch size: 20, lr: 7.01e-04 +2022-05-27 19:00:08,843 INFO [train.py:823] (2/4) Epoch 24, batch 800, loss[loss=0.1291, simple_loss=0.2165, pruned_loss=0.02087, over 6828.00 frames.], tot_loss[loss=0.1741, simple_loss=0.2627, pruned_loss=0.04269, over 1391688.33 frames.], batch size: 15, lr: 7.00e-04 +2022-05-27 19:00:47,535 INFO 
[train.py:823] (2/4) Epoch 24, batch 850, loss[loss=0.1602, simple_loss=0.2576, pruned_loss=0.03135, over 7112.00 frames.], tot_loss[loss=0.1736, simple_loss=0.2622, pruned_loss=0.04247, over 1395324.35 frames.], batch size: 20, lr: 6.99e-04 +2022-05-27 19:01:28,131 INFO [train.py:823] (2/4) Epoch 24, batch 900, loss[loss=0.2046, simple_loss=0.2901, pruned_loss=0.05954, over 6556.00 frames.], tot_loss[loss=0.1733, simple_loss=0.2623, pruned_loss=0.0421, over 1398309.93 frames.], batch size: 34, lr: 6.98e-04 +2022-05-27 19:02:07,105 INFO [train.py:823] (2/4) Epoch 24, batch 950, loss[loss=0.1483, simple_loss=0.2389, pruned_loss=0.02887, over 7098.00 frames.], tot_loss[loss=0.1736, simple_loss=0.2625, pruned_loss=0.04234, over 1395538.50 frames.], batch size: 18, lr: 6.98e-04 +2022-05-27 19:02:19,688 INFO [train.py:823] (2/4) Epoch 25, batch 0, loss[loss=0.1876, simple_loss=0.2776, pruned_loss=0.04883, over 7274.00 frames.], tot_loss[loss=0.1876, simple_loss=0.2776, pruned_loss=0.04883, over 7274.00 frames.], batch size: 21, lr: 6.84e-04 +2022-05-27 19:02:58,869 INFO [train.py:823] (2/4) Epoch 25, batch 50, loss[loss=0.1438, simple_loss=0.2228, pruned_loss=0.03237, over 7284.00 frames.], tot_loss[loss=0.1753, simple_loss=0.263, pruned_loss=0.04378, over 324545.41 frames.], batch size: 17, lr: 6.83e-04 +2022-05-27 19:03:37,893 INFO [train.py:823] (2/4) Epoch 25, batch 100, loss[loss=0.1756, simple_loss=0.2597, pruned_loss=0.04579, over 6806.00 frames.], tot_loss[loss=0.173, simple_loss=0.2606, pruned_loss=0.04267, over 564672.71 frames.], batch size: 15, lr: 6.82e-04 +2022-05-27 19:04:17,002 INFO [train.py:823] (2/4) Epoch 25, batch 150, loss[loss=0.1809, simple_loss=0.2732, pruned_loss=0.04433, over 7311.00 frames.], tot_loss[loss=0.172, simple_loss=0.2594, pruned_loss=0.04225, over 759596.81 frames.], batch size: 22, lr: 6.82e-04 +2022-05-27 19:04:56,397 INFO [train.py:823] (2/4) Epoch 25, batch 200, loss[loss=0.1893, simple_loss=0.2759, pruned_loss=0.05141, over 7289.00 frames.], tot_loss[loss=0.1729, simple_loss=0.2611, pruned_loss=0.04235, over 912078.67 frames.], batch size: 21, lr: 6.81e-04 +2022-05-27 19:05:35,282 INFO [train.py:823] (2/4) Epoch 25, batch 250, loss[loss=0.1439, simple_loss=0.2252, pruned_loss=0.03133, over 7300.00 frames.], tot_loss[loss=0.1724, simple_loss=0.2606, pruned_loss=0.04215, over 1024214.63 frames.], batch size: 17, lr: 6.80e-04 +2022-05-27 19:06:14,211 INFO [train.py:823] (2/4) Epoch 25, batch 300, loss[loss=0.1771, simple_loss=0.27, pruned_loss=0.0421, over 7282.00 frames.], tot_loss[loss=0.1728, simple_loss=0.2608, pruned_loss=0.0424, over 1117686.34 frames.], batch size: 21, lr: 6.80e-04 +2022-05-27 19:06:53,470 INFO [train.py:823] (2/4) Epoch 25, batch 350, loss[loss=0.1971, simple_loss=0.2875, pruned_loss=0.05336, over 7135.00 frames.], tot_loss[loss=0.1733, simple_loss=0.2614, pruned_loss=0.04258, over 1183891.47 frames.], batch size: 23, lr: 6.79e-04 +2022-05-27 19:07:32,654 INFO [train.py:823] (2/4) Epoch 25, batch 400, loss[loss=0.1741, simple_loss=0.2704, pruned_loss=0.03896, over 7214.00 frames.], tot_loss[loss=0.1746, simple_loss=0.2631, pruned_loss=0.04309, over 1239862.21 frames.], batch size: 25, lr: 6.78e-04 +2022-05-27 19:08:11,890 INFO [train.py:823] (2/4) Epoch 25, batch 450, loss[loss=0.1388, simple_loss=0.2175, pruned_loss=0.03004, over 7207.00 frames.], tot_loss[loss=0.175, simple_loss=0.2637, pruned_loss=0.04313, over 1271775.27 frames.], batch size: 16, lr: 6.77e-04 +2022-05-27 19:08:50,577 INFO [train.py:823] (2/4) Epoch 25, batch 
500, loss[loss=0.1476, simple_loss=0.2176, pruned_loss=0.03879, over 6988.00 frames.], tot_loss[loss=0.1742, simple_loss=0.2626, pruned_loss=0.0429, over 1304391.06 frames.], batch size: 16, lr: 6.77e-04 +2022-05-27 19:09:29,838 INFO [train.py:823] (2/4) Epoch 25, batch 550, loss[loss=0.1837, simple_loss=0.2691, pruned_loss=0.04912, over 7188.00 frames.], tot_loss[loss=0.1734, simple_loss=0.2615, pruned_loss=0.04265, over 1330813.61 frames.], batch size: 21, lr: 6.76e-04 +2022-05-27 19:10:08,998 INFO [train.py:823] (2/4) Epoch 25, batch 600, loss[loss=0.1664, simple_loss=0.2639, pruned_loss=0.03444, over 7284.00 frames.], tot_loss[loss=0.1729, simple_loss=0.2607, pruned_loss=0.04259, over 1343552.27 frames.], batch size: 21, lr: 6.75e-04 +2022-05-27 19:10:49,699 INFO [train.py:823] (2/4) Epoch 25, batch 650, loss[loss=0.148, simple_loss=0.2399, pruned_loss=0.02805, over 7282.00 frames.], tot_loss[loss=0.1727, simple_loss=0.2609, pruned_loss=0.04229, over 1358380.89 frames.], batch size: 20, lr: 6.75e-04 +2022-05-27 19:11:29,030 INFO [train.py:823] (2/4) Epoch 25, batch 700, loss[loss=0.1619, simple_loss=0.2417, pruned_loss=0.04108, over 7153.00 frames.], tot_loss[loss=0.1718, simple_loss=0.2603, pruned_loss=0.04167, over 1370804.39 frames.], batch size: 17, lr: 6.74e-04 +2022-05-27 19:12:08,345 INFO [train.py:823] (2/4) Epoch 25, batch 750, loss[loss=0.1511, simple_loss=0.2406, pruned_loss=0.03079, over 7382.00 frames.], tot_loss[loss=0.1712, simple_loss=0.2597, pruned_loss=0.04137, over 1378657.19 frames.], batch size: 20, lr: 6.73e-04 +2022-05-27 19:12:47,493 INFO [train.py:823] (2/4) Epoch 25, batch 800, loss[loss=0.1765, simple_loss=0.2669, pruned_loss=0.04299, over 7180.00 frames.], tot_loss[loss=0.1711, simple_loss=0.2593, pruned_loss=0.04146, over 1390359.75 frames.], batch size: 21, lr: 6.73e-04 +2022-05-27 19:13:27,829 INFO [train.py:823] (2/4) Epoch 25, batch 850, loss[loss=0.1395, simple_loss=0.2333, pruned_loss=0.02288, over 7191.00 frames.], tot_loss[loss=0.1709, simple_loss=0.2594, pruned_loss=0.04123, over 1395545.93 frames.], batch size: 18, lr: 6.72e-04 +2022-05-27 19:14:08,776 INFO [train.py:823] (2/4) Epoch 25, batch 900, loss[loss=0.151, simple_loss=0.2584, pruned_loss=0.02181, over 6548.00 frames.], tot_loss[loss=0.1707, simple_loss=0.2593, pruned_loss=0.04104, over 1394530.97 frames.], batch size: 34, lr: 6.71e-04 +2022-05-27 19:14:59,628 INFO [train.py:823] (2/4) Epoch 26, batch 0, loss[loss=0.1746, simple_loss=0.2603, pruned_loss=0.04446, over 7313.00 frames.], tot_loss[loss=0.1746, simple_loss=0.2603, pruned_loss=0.04446, over 7313.00 frames.], batch size: 18, lr: 6.58e-04 +2022-05-27 19:15:38,647 INFO [train.py:823] (2/4) Epoch 26, batch 50, loss[loss=0.1642, simple_loss=0.2493, pruned_loss=0.03955, over 7370.00 frames.], tot_loss[loss=0.168, simple_loss=0.256, pruned_loss=0.03995, over 323536.76 frames.], batch size: 20, lr: 6.57e-04 +2022-05-27 19:16:17,979 INFO [train.py:823] (2/4) Epoch 26, batch 100, loss[loss=0.1584, simple_loss=0.2553, pruned_loss=0.03071, over 7232.00 frames.], tot_loss[loss=0.1719, simple_loss=0.2596, pruned_loss=0.04211, over 567390.63 frames.], batch size: 25, lr: 6.56e-04 +2022-05-27 19:16:57,092 INFO [train.py:823] (2/4) Epoch 26, batch 150, loss[loss=0.1782, simple_loss=0.2661, pruned_loss=0.04519, over 7180.00 frames.], tot_loss[loss=0.1717, simple_loss=0.2593, pruned_loss=0.04199, over 753680.71 frames.], batch size: 25, lr: 6.56e-04 +2022-05-27 19:17:36,124 INFO [train.py:823] (2/4) Epoch 26, batch 200, loss[loss=0.1551, 
simple_loss=0.2392, pruned_loss=0.0355, over 7094.00 frames.], tot_loss[loss=0.1721, simple_loss=0.2602, pruned_loss=0.042, over 900490.38 frames.], batch size: 18, lr: 6.55e-04 +2022-05-27 19:18:15,523 INFO [train.py:823] (2/4) Epoch 26, batch 250, loss[loss=0.1792, simple_loss=0.2758, pruned_loss=0.04126, over 7420.00 frames.], tot_loss[loss=0.1723, simple_loss=0.2606, pruned_loss=0.04203, over 1015893.39 frames.], batch size: 22, lr: 6.55e-04 +2022-05-27 19:18:59,359 INFO [train.py:823] (2/4) Epoch 26, batch 300, loss[loss=0.1464, simple_loss=0.2334, pruned_loss=0.02969, over 7122.00 frames.], tot_loss[loss=0.1714, simple_loss=0.2597, pruned_loss=0.04155, over 1106097.86 frames.], batch size: 20, lr: 6.54e-04 +2022-05-27 19:19:38,180 INFO [train.py:823] (2/4) Epoch 26, batch 350, loss[loss=0.2045, simple_loss=0.2851, pruned_loss=0.06194, over 6468.00 frames.], tot_loss[loss=0.1708, simple_loss=0.2593, pruned_loss=0.04122, over 1177950.06 frames.], batch size: 34, lr: 6.53e-04 +2022-05-27 19:20:17,251 INFO [train.py:823] (2/4) Epoch 26, batch 400, loss[loss=0.1812, simple_loss=0.2746, pruned_loss=0.04394, over 7154.00 frames.], tot_loss[loss=0.1709, simple_loss=0.2592, pruned_loss=0.04124, over 1234215.13 frames.], batch size: 23, lr: 6.53e-04 +2022-05-27 19:20:55,958 INFO [train.py:823] (2/4) Epoch 26, batch 450, loss[loss=0.1818, simple_loss=0.2782, pruned_loss=0.0427, over 7190.00 frames.], tot_loss[loss=0.1709, simple_loss=0.2595, pruned_loss=0.04117, over 1273571.42 frames.], batch size: 21, lr: 6.52e-04 +2022-05-27 19:21:34,742 INFO [train.py:823] (2/4) Epoch 26, batch 500, loss[loss=0.1754, simple_loss=0.2748, pruned_loss=0.03799, over 6972.00 frames.], tot_loss[loss=0.172, simple_loss=0.2602, pruned_loss=0.04189, over 1303778.05 frames.], batch size: 26, lr: 6.51e-04 +2022-05-27 19:22:13,815 INFO [train.py:823] (2/4) Epoch 26, batch 550, loss[loss=0.1407, simple_loss=0.2226, pruned_loss=0.02939, over 7001.00 frames.], tot_loss[loss=0.171, simple_loss=0.2591, pruned_loss=0.04143, over 1326494.98 frames.], batch size: 16, lr: 6.51e-04 +2022-05-27 19:22:52,209 INFO [train.py:823] (2/4) Epoch 26, batch 600, loss[loss=0.1636, simple_loss=0.271, pruned_loss=0.02809, over 7293.00 frames.], tot_loss[loss=0.1714, simple_loss=0.2596, pruned_loss=0.04155, over 1347001.42 frames.], batch size: 22, lr: 6.50e-04 +2022-05-27 19:23:30,982 INFO [train.py:823] (2/4) Epoch 26, batch 650, loss[loss=0.1868, simple_loss=0.2788, pruned_loss=0.0474, over 7339.00 frames.], tot_loss[loss=0.1701, simple_loss=0.2582, pruned_loss=0.04099, over 1355991.18 frames.], batch size: 23, lr: 6.49e-04 +2022-05-27 19:24:10,406 INFO [train.py:823] (2/4) Epoch 26, batch 700, loss[loss=0.1751, simple_loss=0.2529, pruned_loss=0.0486, over 7026.00 frames.], tot_loss[loss=0.1711, simple_loss=0.2593, pruned_loss=0.0414, over 1369656.08 frames.], batch size: 26, lr: 6.49e-04 +2022-05-27 19:24:49,320 INFO [train.py:823] (2/4) Epoch 26, batch 750, loss[loss=0.158, simple_loss=0.2549, pruned_loss=0.03053, over 7287.00 frames.], tot_loss[loss=0.1713, simple_loss=0.2596, pruned_loss=0.04149, over 1373264.90 frames.], batch size: 19, lr: 6.48e-04 +2022-05-27 19:25:29,489 INFO [train.py:823] (2/4) Epoch 26, batch 800, loss[loss=0.1627, simple_loss=0.2366, pruned_loss=0.04439, over 6788.00 frames.], tot_loss[loss=0.1716, simple_loss=0.2598, pruned_loss=0.04169, over 1381290.91 frames.], batch size: 15, lr: 6.47e-04 +2022-05-27 19:26:08,517 INFO [train.py:823] (2/4) Epoch 26, batch 850, loss[loss=0.1587, simple_loss=0.2414, 
pruned_loss=0.03798, over 6758.00 frames.], tot_loss[loss=0.1719, simple_loss=0.2599, pruned_loss=0.04199, over 1392900.52 frames.], batch size: 15, lr: 6.47e-04 +2022-05-27 19:26:47,855 INFO [train.py:823] (2/4) Epoch 26, batch 900, loss[loss=0.137, simple_loss=0.2239, pruned_loss=0.02501, over 7024.00 frames.], tot_loss[loss=0.1722, simple_loss=0.2605, pruned_loss=0.04191, over 1395156.66 frames.], batch size: 17, lr: 6.46e-04 +2022-05-27 19:27:39,210 INFO [train.py:823] (2/4) Epoch 27, batch 0, loss[loss=0.1504, simple_loss=0.2336, pruned_loss=0.03356, over 7185.00 frames.], tot_loss[loss=0.1504, simple_loss=0.2336, pruned_loss=0.03356, over 7185.00 frames.], batch size: 18, lr: 6.34e-04 +2022-05-27 19:28:18,659 INFO [train.py:823] (2/4) Epoch 27, batch 50, loss[loss=0.1396, simple_loss=0.2308, pruned_loss=0.02421, over 7194.00 frames.], tot_loss[loss=0.1668, simple_loss=0.2553, pruned_loss=0.03911, over 321532.86 frames.], batch size: 18, lr: 6.33e-04 +2022-05-27 19:28:57,624 INFO [train.py:823] (2/4) Epoch 27, batch 100, loss[loss=0.1532, simple_loss=0.2499, pruned_loss=0.02828, over 7222.00 frames.], tot_loss[loss=0.1691, simple_loss=0.258, pruned_loss=0.04007, over 563690.85 frames.], batch size: 25, lr: 6.32e-04 +2022-05-27 19:29:36,507 INFO [train.py:823] (2/4) Epoch 27, batch 150, loss[loss=0.1504, simple_loss=0.2348, pruned_loss=0.03298, over 7295.00 frames.], tot_loss[loss=0.1708, simple_loss=0.2593, pruned_loss=0.04116, over 753281.56 frames.], batch size: 18, lr: 6.32e-04 +2022-05-27 19:30:15,675 INFO [train.py:823] (2/4) Epoch 27, batch 200, loss[loss=0.1838, simple_loss=0.2818, pruned_loss=0.04291, over 7424.00 frames.], tot_loss[loss=0.1706, simple_loss=0.26, pruned_loss=0.04063, over 900275.41 frames.], batch size: 22, lr: 6.31e-04 +2022-05-27 19:30:54,938 INFO [train.py:823] (2/4) Epoch 27, batch 250, loss[loss=0.1454, simple_loss=0.2252, pruned_loss=0.03278, over 7024.00 frames.], tot_loss[loss=0.1703, simple_loss=0.2598, pruned_loss=0.04043, over 1013352.41 frames.], batch size: 17, lr: 6.31e-04 +2022-05-27 19:31:34,019 INFO [train.py:823] (2/4) Epoch 27, batch 300, loss[loss=0.1928, simple_loss=0.2833, pruned_loss=0.05117, over 7378.00 frames.], tot_loss[loss=0.1696, simple_loss=0.259, pruned_loss=0.04005, over 1106925.30 frames.], batch size: 21, lr: 6.30e-04 +2022-05-27 19:32:13,834 INFO [train.py:823] (2/4) Epoch 27, batch 350, loss[loss=0.1562, simple_loss=0.2318, pruned_loss=0.04034, over 7294.00 frames.], tot_loss[loss=0.17, simple_loss=0.2589, pruned_loss=0.04058, over 1177097.05 frames.], batch size: 19, lr: 6.29e-04 +2022-05-27 19:32:52,591 INFO [train.py:823] (2/4) Epoch 27, batch 400, loss[loss=0.1708, simple_loss=0.2634, pruned_loss=0.03906, over 7281.00 frames.], tot_loss[loss=0.1699, simple_loss=0.2586, pruned_loss=0.0406, over 1231517.99 frames.], batch size: 20, lr: 6.29e-04 +2022-05-27 19:33:33,779 INFO [train.py:823] (2/4) Epoch 27, batch 450, loss[loss=0.1855, simple_loss=0.275, pruned_loss=0.04804, over 4945.00 frames.], tot_loss[loss=0.1693, simple_loss=0.2579, pruned_loss=0.04039, over 1275178.40 frames.], batch size: 46, lr: 6.28e-04 +2022-05-27 19:34:12,473 INFO [train.py:823] (2/4) Epoch 27, batch 500, loss[loss=0.2051, simple_loss=0.2892, pruned_loss=0.06048, over 7151.00 frames.], tot_loss[loss=0.1697, simple_loss=0.2579, pruned_loss=0.04073, over 1299885.96 frames.], batch size: 23, lr: 6.28e-04 +2022-05-27 19:34:52,011 INFO [train.py:823] (2/4) Epoch 27, batch 550, loss[loss=0.1657, simple_loss=0.2515, pruned_loss=0.03998, over 7275.00 
frames.], tot_loss[loss=0.1704, simple_loss=0.2592, pruned_loss=0.04079, over 1328787.99 frames.], batch size: 20, lr: 6.27e-04 +2022-05-27 19:35:30,738 INFO [train.py:823] (2/4) Epoch 27, batch 600, loss[loss=0.1447, simple_loss=0.2374, pruned_loss=0.02598, over 7303.00 frames.], tot_loss[loss=0.1697, simple_loss=0.2585, pruned_loss=0.04043, over 1355176.92 frames.], batch size: 18, lr: 6.26e-04 +2022-05-27 19:36:10,982 INFO [train.py:823] (2/4) Epoch 27, batch 650, loss[loss=0.1445, simple_loss=0.2307, pruned_loss=0.02913, over 7188.00 frames.], tot_loss[loss=0.1697, simple_loss=0.2587, pruned_loss=0.04034, over 1374172.60 frames.], batch size: 19, lr: 6.26e-04 +2022-05-27 19:36:51,522 INFO [train.py:823] (2/4) Epoch 27, batch 700, loss[loss=0.171, simple_loss=0.263, pruned_loss=0.03947, over 7379.00 frames.], tot_loss[loss=0.1694, simple_loss=0.2586, pruned_loss=0.04011, over 1384558.41 frames.], batch size: 21, lr: 6.25e-04 +2022-05-27 19:37:31,057 INFO [train.py:823] (2/4) Epoch 27, batch 750, loss[loss=0.2007, simple_loss=0.2732, pruned_loss=0.06404, over 7187.00 frames.], tot_loss[loss=0.1692, simple_loss=0.2581, pruned_loss=0.04011, over 1391737.70 frames.], batch size: 19, lr: 6.25e-04 +2022-05-27 19:38:09,941 INFO [train.py:823] (2/4) Epoch 27, batch 800, loss[loss=0.1818, simple_loss=0.2623, pruned_loss=0.05058, over 7164.00 frames.], tot_loss[loss=0.1704, simple_loss=0.2587, pruned_loss=0.04107, over 1393803.02 frames.], batch size: 23, lr: 6.24e-04 +2022-05-27 19:38:49,307 INFO [train.py:823] (2/4) Epoch 27, batch 850, loss[loss=0.1548, simple_loss=0.246, pruned_loss=0.03179, over 7114.00 frames.], tot_loss[loss=0.1696, simple_loss=0.2576, pruned_loss=0.04084, over 1396931.08 frames.], batch size: 20, lr: 6.23e-04 +2022-05-27 19:39:28,658 INFO [train.py:823] (2/4) Epoch 27, batch 900, loss[loss=0.1484, simple_loss=0.237, pruned_loss=0.02994, over 7292.00 frames.], tot_loss[loss=0.169, simple_loss=0.2572, pruned_loss=0.04041, over 1398837.82 frames.], batch size: 17, lr: 6.23e-04 +2022-05-27 19:40:22,728 INFO [train.py:823] (2/4) Epoch 28, batch 0, loss[loss=0.1612, simple_loss=0.2514, pruned_loss=0.03546, over 7199.00 frames.], tot_loss[loss=0.1612, simple_loss=0.2514, pruned_loss=0.03546, over 7199.00 frames.], batch size: 20, lr: 6.11e-04 +2022-05-27 19:41:02,223 INFO [train.py:823] (2/4) Epoch 28, batch 50, loss[loss=0.1923, simple_loss=0.2752, pruned_loss=0.05474, over 7107.00 frames.], tot_loss[loss=0.165, simple_loss=0.2532, pruned_loss=0.03843, over 315571.51 frames.], batch size: 20, lr: 6.11e-04 +2022-05-27 19:41:41,929 INFO [train.py:823] (2/4) Epoch 28, batch 100, loss[loss=0.1696, simple_loss=0.2613, pruned_loss=0.03893, over 7026.00 frames.], tot_loss[loss=0.1667, simple_loss=0.256, pruned_loss=0.0387, over 560884.03 frames.], batch size: 26, lr: 6.10e-04 +2022-05-27 19:42:21,127 INFO [train.py:823] (2/4) Epoch 28, batch 150, loss[loss=0.1671, simple_loss=0.258, pruned_loss=0.03815, over 4691.00 frames.], tot_loss[loss=0.1668, simple_loss=0.2559, pruned_loss=0.03891, over 749279.09 frames.], batch size: 46, lr: 6.09e-04 +2022-05-27 19:43:00,537 INFO [train.py:823] (2/4) Epoch 28, batch 200, loss[loss=0.1921, simple_loss=0.2699, pruned_loss=0.05712, over 7191.00 frames.], tot_loss[loss=0.1664, simple_loss=0.2552, pruned_loss=0.03881, over 899370.59 frames.], batch size: 20, lr: 6.09e-04 +2022-05-27 19:43:39,775 INFO [train.py:823] (2/4) Epoch 28, batch 250, loss[loss=0.152, simple_loss=0.2412, pruned_loss=0.03142, over 7329.00 frames.], tot_loss[loss=0.1661, 
simple_loss=0.2551, pruned_loss=0.0386, over 1015218.17 frames.], batch size: 23, lr: 6.08e-04 +2022-05-27 19:44:19,109 INFO [train.py:823] (2/4) Epoch 28, batch 300, loss[loss=0.1749, simple_loss=0.271, pruned_loss=0.03938, over 6897.00 frames.], tot_loss[loss=0.1674, simple_loss=0.2558, pruned_loss=0.03945, over 1104447.21 frames.], batch size: 29, lr: 6.08e-04 +2022-05-27 19:44:58,548 INFO [train.py:823] (2/4) Epoch 28, batch 350, loss[loss=0.2049, simple_loss=0.2987, pruned_loss=0.0556, over 7313.00 frames.], tot_loss[loss=0.1668, simple_loss=0.2559, pruned_loss=0.0389, over 1174300.12 frames.], batch size: 23, lr: 6.07e-04 +2022-05-27 19:45:37,865 INFO [train.py:823] (2/4) Epoch 28, batch 400, loss[loss=0.177, simple_loss=0.2759, pruned_loss=0.03908, over 7288.00 frames.], tot_loss[loss=0.1668, simple_loss=0.256, pruned_loss=0.03877, over 1229072.75 frames.], batch size: 21, lr: 6.07e-04 +2022-05-27 19:46:16,843 INFO [train.py:823] (2/4) Epoch 28, batch 450, loss[loss=0.1872, simple_loss=0.2819, pruned_loss=0.04628, over 6937.00 frames.], tot_loss[loss=0.1667, simple_loss=0.2558, pruned_loss=0.03881, over 1269734.63 frames.], batch size: 29, lr: 6.06e-04 +2022-05-27 19:46:56,169 INFO [train.py:823] (2/4) Epoch 28, batch 500, loss[loss=0.1664, simple_loss=0.2609, pruned_loss=0.03591, over 6993.00 frames.], tot_loss[loss=0.1667, simple_loss=0.2555, pruned_loss=0.03893, over 1306191.02 frames.], batch size: 29, lr: 6.06e-04 +2022-05-27 19:47:35,342 INFO [train.py:823] (2/4) Epoch 28, batch 550, loss[loss=0.1826, simple_loss=0.2649, pruned_loss=0.05016, over 7104.00 frames.], tot_loss[loss=0.1679, simple_loss=0.2568, pruned_loss=0.03948, over 1329663.74 frames.], batch size: 20, lr: 6.05e-04 +2022-05-27 19:48:14,502 INFO [train.py:823] (2/4) Epoch 28, batch 600, loss[loss=0.1577, simple_loss=0.2454, pruned_loss=0.03503, over 7198.00 frames.], tot_loss[loss=0.1683, simple_loss=0.2576, pruned_loss=0.0395, over 1347420.39 frames.], batch size: 19, lr: 6.04e-04 +2022-05-27 19:48:53,687 INFO [train.py:823] (2/4) Epoch 28, batch 650, loss[loss=0.1486, simple_loss=0.2409, pruned_loss=0.02812, over 7285.00 frames.], tot_loss[loss=0.1679, simple_loss=0.2572, pruned_loss=0.03934, over 1365606.54 frames.], batch size: 19, lr: 6.04e-04 +2022-05-27 19:49:34,161 INFO [train.py:823] (2/4) Epoch 28, batch 700, loss[loss=0.1558, simple_loss=0.2503, pruned_loss=0.03063, over 7312.00 frames.], tot_loss[loss=0.168, simple_loss=0.2571, pruned_loss=0.03943, over 1375687.45 frames.], batch size: 18, lr: 6.03e-04 +2022-05-27 19:50:13,281 INFO [train.py:823] (2/4) Epoch 28, batch 750, loss[loss=0.1828, simple_loss=0.2729, pruned_loss=0.04631, over 5030.00 frames.], tot_loss[loss=0.1683, simple_loss=0.2573, pruned_loss=0.03972, over 1382447.37 frames.], batch size: 46, lr: 6.03e-04 +2022-05-27 19:50:52,583 INFO [train.py:823] (2/4) Epoch 28, batch 800, loss[loss=0.128, simple_loss=0.2126, pruned_loss=0.02171, over 7011.00 frames.], tot_loss[loss=0.1683, simple_loss=0.2573, pruned_loss=0.03961, over 1394235.40 frames.], batch size: 16, lr: 6.02e-04 +2022-05-27 19:51:31,402 INFO [train.py:823] (2/4) Epoch 28, batch 850, loss[loss=0.1476, simple_loss=0.2447, pruned_loss=0.02524, over 7378.00 frames.], tot_loss[loss=0.1683, simple_loss=0.2576, pruned_loss=0.0395, over 1398526.04 frames.], batch size: 21, lr: 6.02e-04 +2022-05-27 19:52:10,791 INFO [train.py:823] (2/4) Epoch 28, batch 900, loss[loss=0.1666, simple_loss=0.268, pruned_loss=0.03259, over 7369.00 frames.], tot_loss[loss=0.168, simple_loss=0.2574, 
pruned_loss=0.03935, over 1400121.21 frames.], batch size: 21, lr: 6.01e-04 +2022-05-27 19:53:03,406 INFO [train.py:823] (2/4) Epoch 29, batch 0, loss[loss=0.1675, simple_loss=0.258, pruned_loss=0.03843, over 7004.00 frames.], tot_loss[loss=0.1675, simple_loss=0.258, pruned_loss=0.03843, over 7004.00 frames.], batch size: 26, lr: 5.90e-04 +2022-05-27 19:53:42,715 INFO [train.py:823] (2/4) Epoch 29, batch 50, loss[loss=0.1713, simple_loss=0.261, pruned_loss=0.04081, over 7277.00 frames.], tot_loss[loss=0.1671, simple_loss=0.2543, pruned_loss=0.03996, over 320513.08 frames.], batch size: 21, lr: 5.90e-04 +2022-05-27 19:54:22,252 INFO [train.py:823] (2/4) Epoch 29, batch 100, loss[loss=0.1681, simple_loss=0.257, pruned_loss=0.03961, over 7236.00 frames.], tot_loss[loss=0.1675, simple_loss=0.256, pruned_loss=0.03948, over 569493.04 frames.], batch size: 24, lr: 5.89e-04 +2022-05-27 19:55:01,937 INFO [train.py:823] (2/4) Epoch 29, batch 150, loss[loss=0.1707, simple_loss=0.2478, pruned_loss=0.04674, over 7289.00 frames.], tot_loss[loss=0.1678, simple_loss=0.256, pruned_loss=0.03977, over 760209.60 frames.], batch size: 19, lr: 5.89e-04 +2022-05-27 19:55:40,947 INFO [train.py:823] (2/4) Epoch 29, batch 200, loss[loss=0.1804, simple_loss=0.2795, pruned_loss=0.04061, over 7331.00 frames.], tot_loss[loss=0.1687, simple_loss=0.2572, pruned_loss=0.04013, over 899095.75 frames.], batch size: 23, lr: 5.88e-04 +2022-05-27 19:56:21,576 INFO [train.py:823] (2/4) Epoch 29, batch 250, loss[loss=0.1671, simple_loss=0.2506, pruned_loss=0.04185, over 7396.00 frames.], tot_loss[loss=0.1661, simple_loss=0.2543, pruned_loss=0.03889, over 1015129.75 frames.], batch size: 19, lr: 5.88e-04 +2022-05-27 19:57:00,571 INFO [train.py:823] (2/4) Epoch 29, batch 300, loss[loss=0.1582, simple_loss=0.2508, pruned_loss=0.03278, over 7275.00 frames.], tot_loss[loss=0.1671, simple_loss=0.2554, pruned_loss=0.03936, over 1104975.68 frames.], batch size: 20, lr: 5.87e-04 +2022-05-27 19:57:40,079 INFO [train.py:823] (2/4) Epoch 29, batch 350, loss[loss=0.1543, simple_loss=0.2374, pruned_loss=0.03558, over 7258.00 frames.], tot_loss[loss=0.1675, simple_loss=0.2559, pruned_loss=0.03953, over 1173793.27 frames.], batch size: 16, lr: 5.87e-04 +2022-05-27 19:58:19,106 INFO [train.py:823] (2/4) Epoch 29, batch 400, loss[loss=0.1422, simple_loss=0.215, pruned_loss=0.0347, over 7303.00 frames.], tot_loss[loss=0.1679, simple_loss=0.2567, pruned_loss=0.03957, over 1229026.89 frames.], batch size: 17, lr: 5.86e-04 +2022-05-27 19:58:59,832 INFO [train.py:823] (2/4) Epoch 29, batch 450, loss[loss=0.1586, simple_loss=0.2324, pruned_loss=0.04244, over 7089.00 frames.], tot_loss[loss=0.1683, simple_loss=0.2567, pruned_loss=0.04, over 1269323.58 frames.], batch size: 18, lr: 5.85e-04 +2022-05-27 19:59:40,117 INFO [train.py:823] (2/4) Epoch 29, batch 500, loss[loss=0.1637, simple_loss=0.2571, pruned_loss=0.03513, over 7116.00 frames.], tot_loss[loss=0.1682, simple_loss=0.2564, pruned_loss=0.03999, over 1296739.95 frames.], batch size: 20, lr: 5.85e-04 +2022-05-27 20:00:19,255 INFO [train.py:823] (2/4) Epoch 29, batch 550, loss[loss=0.1747, simple_loss=0.2721, pruned_loss=0.03869, over 6515.00 frames.], tot_loss[loss=0.1678, simple_loss=0.2565, pruned_loss=0.03956, over 1326362.21 frames.], batch size: 34, lr: 5.84e-04 +2022-05-27 20:00:58,217 INFO [train.py:823] (2/4) Epoch 29, batch 600, loss[loss=0.1576, simple_loss=0.2533, pruned_loss=0.03097, over 6569.00 frames.], tot_loss[loss=0.1685, simple_loss=0.2577, pruned_loss=0.03969, over 1347099.55 
frames.], batch size: 34, lr: 5.84e-04 +2022-05-27 20:01:37,894 INFO [train.py:823] (2/4) Epoch 29, batch 650, loss[loss=0.1576, simple_loss=0.2556, pruned_loss=0.02984, over 7372.00 frames.], tot_loss[loss=0.1687, simple_loss=0.2577, pruned_loss=0.03986, over 1364121.20 frames.], batch size: 20, lr: 5.83e-04 +2022-05-27 20:02:16,338 INFO [train.py:823] (2/4) Epoch 29, batch 700, loss[loss=0.1493, simple_loss=0.2372, pruned_loss=0.03072, over 7185.00 frames.], tot_loss[loss=0.1681, simple_loss=0.257, pruned_loss=0.03959, over 1372051.32 frames.], batch size: 19, lr: 5.83e-04 +2022-05-27 20:02:55,475 INFO [train.py:823] (2/4) Epoch 29, batch 750, loss[loss=0.1833, simple_loss=0.2599, pruned_loss=0.05339, over 4911.00 frames.], tot_loss[loss=0.1684, simple_loss=0.2572, pruned_loss=0.0398, over 1379026.07 frames.], batch size: 47, lr: 5.82e-04 +2022-05-27 20:03:34,154 INFO [train.py:823] (2/4) Epoch 29, batch 800, loss[loss=0.1351, simple_loss=0.2201, pruned_loss=0.02509, over 7191.00 frames.], tot_loss[loss=0.1682, simple_loss=0.2565, pruned_loss=0.03994, over 1387544.95 frames.], batch size: 18, lr: 5.82e-04 +2022-05-27 20:04:13,269 INFO [train.py:823] (2/4) Epoch 29, batch 850, loss[loss=0.1856, simple_loss=0.2777, pruned_loss=0.04673, over 7195.00 frames.], tot_loss[loss=0.1679, simple_loss=0.2565, pruned_loss=0.03961, over 1397220.18 frames.], batch size: 24, lr: 5.81e-04 +2022-05-27 20:04:52,054 INFO [train.py:823] (2/4) Epoch 29, batch 900, loss[loss=0.1676, simple_loss=0.2564, pruned_loss=0.03941, over 7153.00 frames.], tot_loss[loss=0.1687, simple_loss=0.2571, pruned_loss=0.0402, over 1396011.35 frames.], batch size: 22, lr: 5.81e-04 +2022-05-27 20:05:30,821 INFO [train.py:823] (2/4) Epoch 29, batch 950, loss[loss=0.2104, simple_loss=0.2832, pruned_loss=0.06882, over 5086.00 frames.], tot_loss[loss=0.1688, simple_loss=0.2569, pruned_loss=0.04038, over 1390099.84 frames.], batch size: 47, lr: 5.80e-04 +2022-05-27 20:05:46,357 INFO [train.py:823] (2/4) Epoch 30, batch 0, loss[loss=0.1743, simple_loss=0.2646, pruned_loss=0.04203, over 7382.00 frames.], tot_loss[loss=0.1743, simple_loss=0.2646, pruned_loss=0.04203, over 7382.00 frames.], batch size: 20, lr: 5.71e-04 +2022-05-27 20:06:25,465 INFO [train.py:823] (2/4) Epoch 30, batch 50, loss[loss=0.1463, simple_loss=0.2301, pruned_loss=0.03123, over 7094.00 frames.], tot_loss[loss=0.1653, simple_loss=0.2536, pruned_loss=0.03847, over 314435.34 frames.], batch size: 19, lr: 5.70e-04 +2022-05-27 20:07:04,814 INFO [train.py:823] (2/4) Epoch 30, batch 100, loss[loss=0.1596, simple_loss=0.2351, pruned_loss=0.04201, over 7286.00 frames.], tot_loss[loss=0.1649, simple_loss=0.2535, pruned_loss=0.0381, over 561865.95 frames.], batch size: 17, lr: 5.70e-04 +2022-05-27 20:07:43,704 INFO [train.py:823] (2/4) Epoch 30, batch 150, loss[loss=0.1813, simple_loss=0.2755, pruned_loss=0.04348, over 7159.00 frames.], tot_loss[loss=0.167, simple_loss=0.2558, pruned_loss=0.03915, over 753968.19 frames.], batch size: 23, lr: 5.69e-04 +2022-05-27 20:08:23,095 INFO [train.py:823] (2/4) Epoch 30, batch 200, loss[loss=0.1867, simple_loss=0.278, pruned_loss=0.04771, over 7147.00 frames.], tot_loss[loss=0.1677, simple_loss=0.2568, pruned_loss=0.03932, over 901544.82 frames.], batch size: 23, lr: 5.69e-04 +2022-05-27 20:09:02,240 INFO [train.py:823] (2/4) Epoch 30, batch 250, loss[loss=0.161, simple_loss=0.2526, pruned_loss=0.03474, over 7107.00 frames.], tot_loss[loss=0.1678, simple_loss=0.2572, pruned_loss=0.0392, over 1013579.31 frames.], batch size: 19, lr: 
5.68e-04 +2022-05-27 20:09:41,495 INFO [train.py:823] (2/4) Epoch 30, batch 300, loss[loss=0.1509, simple_loss=0.2268, pruned_loss=0.03754, over 7156.00 frames.], tot_loss[loss=0.1689, simple_loss=0.2584, pruned_loss=0.03971, over 1106709.16 frames.], batch size: 17, lr: 5.68e-04 +2022-05-27 20:10:20,372 INFO [train.py:823] (2/4) Epoch 30, batch 350, loss[loss=0.1782, simple_loss=0.2773, pruned_loss=0.03948, over 7241.00 frames.], tot_loss[loss=0.1686, simple_loss=0.2579, pruned_loss=0.03968, over 1176568.90 frames.], batch size: 24, lr: 5.67e-04 +2022-05-27 20:10:59,239 INFO [train.py:823] (2/4) Epoch 30, batch 400, loss[loss=0.1613, simple_loss=0.2586, pruned_loss=0.03198, over 7063.00 frames.], tot_loss[loss=0.1672, simple_loss=0.2568, pruned_loss=0.03887, over 1230384.56 frames.], batch size: 26, lr: 5.67e-04 +2022-05-27 20:11:38,358 INFO [train.py:823] (2/4) Epoch 30, batch 450, loss[loss=0.1912, simple_loss=0.2822, pruned_loss=0.05013, over 6979.00 frames.], tot_loss[loss=0.1671, simple_loss=0.2564, pruned_loss=0.03885, over 1268465.73 frames.], batch size: 29, lr: 5.66e-04 +2022-05-27 20:12:17,473 INFO [train.py:823] (2/4) Epoch 30, batch 500, loss[loss=0.1837, simple_loss=0.271, pruned_loss=0.04821, over 7091.00 frames.], tot_loss[loss=0.167, simple_loss=0.2564, pruned_loss=0.03883, over 1301855.44 frames.], batch size: 19, lr: 5.66e-04 +2022-05-27 20:12:56,755 INFO [train.py:823] (2/4) Epoch 30, batch 550, loss[loss=0.1635, simple_loss=0.2684, pruned_loss=0.0293, over 7405.00 frames.], tot_loss[loss=0.1672, simple_loss=0.2566, pruned_loss=0.03886, over 1326758.10 frames.], batch size: 22, lr: 5.65e-04 +2022-05-27 20:13:37,217 INFO [train.py:823] (2/4) Epoch 30, batch 600, loss[loss=0.1481, simple_loss=0.2313, pruned_loss=0.03248, over 7194.00 frames.], tot_loss[loss=0.1662, simple_loss=0.2557, pruned_loss=0.03838, over 1344594.38 frames.], batch size: 19, lr: 5.65e-04 +2022-05-27 20:14:16,392 INFO [train.py:823] (2/4) Epoch 30, batch 650, loss[loss=0.1792, simple_loss=0.2683, pruned_loss=0.04504, over 7426.00 frames.], tot_loss[loss=0.1663, simple_loss=0.2552, pruned_loss=0.03867, over 1358411.79 frames.], batch size: 22, lr: 5.64e-04 +2022-05-27 20:14:55,809 INFO [train.py:823] (2/4) Epoch 30, batch 700, loss[loss=0.1386, simple_loss=0.2322, pruned_loss=0.02252, over 7286.00 frames.], tot_loss[loss=0.165, simple_loss=0.2537, pruned_loss=0.03812, over 1376598.34 frames.], batch size: 19, lr: 5.64e-04 +2022-05-27 20:15:34,838 INFO [train.py:823] (2/4) Epoch 30, batch 750, loss[loss=0.1662, simple_loss=0.2466, pruned_loss=0.04292, over 7098.00 frames.], tot_loss[loss=0.1653, simple_loss=0.2538, pruned_loss=0.03835, over 1382256.31 frames.], batch size: 18, lr: 5.63e-04 +2022-05-27 20:16:13,911 INFO [train.py:823] (2/4) Epoch 30, batch 800, loss[loss=0.1652, simple_loss=0.2646, pruned_loss=0.03286, over 6951.00 frames.], tot_loss[loss=0.1655, simple_loss=0.2546, pruned_loss=0.0382, over 1391375.38 frames.], batch size: 26, lr: 5.63e-04 +2022-05-27 20:16:52,941 INFO [train.py:823] (2/4) Epoch 30, batch 850, loss[loss=0.1439, simple_loss=0.2294, pruned_loss=0.02915, over 7188.00 frames.], tot_loss[loss=0.1652, simple_loss=0.2542, pruned_loss=0.03808, over 1390602.99 frames.], batch size: 18, lr: 5.62e-04 +2022-05-27 20:17:32,095 INFO [train.py:823] (2/4) Epoch 30, batch 900, loss[loss=0.1534, simple_loss=0.2436, pruned_loss=0.03158, over 7283.00 frames.], tot_loss[loss=0.1652, simple_loss=0.2543, pruned_loss=0.03804, over 1395526.58 frames.], batch size: 19, lr: 5.62e-04 +2022-05-27 
20:18:24,263 INFO [train.py:823] (2/4) Epoch 31, batch 0, loss[loss=0.1546, simple_loss=0.2404, pruned_loss=0.03436, over 7382.00 frames.], tot_loss[loss=0.1546, simple_loss=0.2404, pruned_loss=0.03436, over 7382.00 frames.], batch size: 20, lr: 5.52e-04 +2022-05-27 20:19:03,839 INFO [train.py:823] (2/4) Epoch 31, batch 50, loss[loss=0.1396, simple_loss=0.2288, pruned_loss=0.02524, over 7198.00 frames.], tot_loss[loss=0.1621, simple_loss=0.2495, pruned_loss=0.03735, over 324809.52 frames.], batch size: 18, lr: 5.52e-04 +2022-05-27 20:19:44,389 INFO [train.py:823] (2/4) Epoch 31, batch 100, loss[loss=0.1446, simple_loss=0.2318, pruned_loss=0.02876, over 7307.00 frames.], tot_loss[loss=0.1633, simple_loss=0.2523, pruned_loss=0.03721, over 565230.30 frames.], batch size: 16, lr: 5.51e-04 +2022-05-27 20:20:23,624 INFO [train.py:823] (2/4) Epoch 31, batch 150, loss[loss=0.1837, simple_loss=0.2731, pruned_loss=0.04715, over 7207.00 frames.], tot_loss[loss=0.1645, simple_loss=0.254, pruned_loss=0.03748, over 754686.57 frames.], batch size: 25, lr: 5.51e-04 +2022-05-27 20:21:02,328 INFO [train.py:823] (2/4) Epoch 31, batch 200, loss[loss=0.1501, simple_loss=0.2362, pruned_loss=0.03199, over 7085.00 frames.], tot_loss[loss=0.1646, simple_loss=0.2542, pruned_loss=0.0375, over 899210.33 frames.], batch size: 18, lr: 5.50e-04 +2022-05-27 20:21:41,508 INFO [train.py:823] (2/4) Epoch 31, batch 250, loss[loss=0.1621, simple_loss=0.2433, pruned_loss=0.04045, over 7139.00 frames.], tot_loss[loss=0.1656, simple_loss=0.2545, pruned_loss=0.03835, over 1005405.15 frames.], batch size: 17, lr: 5.50e-04 +2022-05-27 20:22:21,796 INFO [train.py:823] (2/4) Epoch 31, batch 300, loss[loss=0.1937, simple_loss=0.2869, pruned_loss=0.05021, over 7302.00 frames.], tot_loss[loss=0.1655, simple_loss=0.2545, pruned_loss=0.0383, over 1097757.39 frames.], batch size: 22, lr: 5.49e-04 +2022-05-27 20:23:00,958 INFO [train.py:823] (2/4) Epoch 31, batch 350, loss[loss=0.1492, simple_loss=0.236, pruned_loss=0.03118, over 7155.00 frames.], tot_loss[loss=0.1651, simple_loss=0.2542, pruned_loss=0.03799, over 1164566.55 frames.], batch size: 17, lr: 5.49e-04 +2022-05-27 20:23:41,374 INFO [train.py:823] (2/4) Epoch 31, batch 400, loss[loss=0.1789, simple_loss=0.2591, pruned_loss=0.04932, over 7399.00 frames.], tot_loss[loss=0.1653, simple_loss=0.2546, pruned_loss=0.03801, over 1225964.05 frames.], batch size: 19, lr: 5.49e-04 +2022-05-27 20:24:20,637 INFO [train.py:823] (2/4) Epoch 31, batch 450, loss[loss=0.1467, simple_loss=0.2318, pruned_loss=0.03081, over 7298.00 frames.], tot_loss[loss=0.1645, simple_loss=0.2539, pruned_loss=0.03751, over 1270341.18 frames.], batch size: 18, lr: 5.48e-04 +2022-05-27 20:24:59,879 INFO [train.py:823] (2/4) Epoch 31, batch 500, loss[loss=0.1587, simple_loss=0.2489, pruned_loss=0.0343, over 7102.00 frames.], tot_loss[loss=0.1636, simple_loss=0.2526, pruned_loss=0.0373, over 1303011.16 frames.], batch size: 18, lr: 5.48e-04 +2022-05-27 20:25:39,471 INFO [train.py:823] (2/4) Epoch 31, batch 550, loss[loss=0.1498, simple_loss=0.238, pruned_loss=0.03079, over 7376.00 frames.], tot_loss[loss=0.1633, simple_loss=0.2519, pruned_loss=0.03737, over 1327201.45 frames.], batch size: 19, lr: 5.47e-04 +2022-05-27 20:26:18,512 INFO [train.py:823] (2/4) Epoch 31, batch 600, loss[loss=0.1479, simple_loss=0.2283, pruned_loss=0.03376, over 7176.00 frames.], tot_loss[loss=0.1652, simple_loss=0.2536, pruned_loss=0.03846, over 1347225.21 frames.], batch size: 16, lr: 5.47e-04 +2022-05-27 20:26:57,606 INFO [train.py:823] 
(2/4) Epoch 31, batch 650, loss[loss=0.184, simple_loss=0.2658, pruned_loss=0.0511, over 7168.00 frames.], tot_loss[loss=0.1655, simple_loss=0.254, pruned_loss=0.0385, over 1362573.88 frames.], batch size: 22, lr: 5.46e-04 +2022-05-27 20:27:36,454 INFO [train.py:823] (2/4) Epoch 31, batch 700, loss[loss=0.1489, simple_loss=0.2282, pruned_loss=0.03484, over 7283.00 frames.], tot_loss[loss=0.1662, simple_loss=0.2549, pruned_loss=0.03877, over 1371563.19 frames.], batch size: 17, lr: 5.46e-04 +2022-05-27 20:28:15,533 INFO [train.py:823] (2/4) Epoch 31, batch 750, loss[loss=0.1239, simple_loss=0.2114, pruned_loss=0.01822, over 7288.00 frames.], tot_loss[loss=0.1663, simple_loss=0.2554, pruned_loss=0.03858, over 1382988.70 frames.], batch size: 18, lr: 5.45e-04 +2022-05-27 20:28:54,273 INFO [train.py:823] (2/4) Epoch 31, batch 800, loss[loss=0.1817, simple_loss=0.2489, pruned_loss=0.05731, over 7208.00 frames.], tot_loss[loss=0.1664, simple_loss=0.2554, pruned_loss=0.0387, over 1393501.86 frames.], batch size: 16, lr: 5.45e-04 +2022-05-27 20:29:32,820 INFO [train.py:823] (2/4) Epoch 31, batch 850, loss[loss=0.1713, simple_loss=0.2692, pruned_loss=0.03674, over 7033.00 frames.], tot_loss[loss=0.1659, simple_loss=0.2552, pruned_loss=0.0383, over 1391829.81 frames.], batch size: 26, lr: 5.44e-04 +2022-05-27 20:30:11,761 INFO [train.py:823] (2/4) Epoch 31, batch 900, loss[loss=0.1581, simple_loss=0.2439, pruned_loss=0.03612, over 7103.00 frames.], tot_loss[loss=0.1672, simple_loss=0.2564, pruned_loss=0.03899, over 1396902.48 frames.], batch size: 19, lr: 5.44e-04 +2022-05-27 20:31:03,505 INFO [train.py:823] (2/4) Epoch 32, batch 0, loss[loss=0.1974, simple_loss=0.2784, pruned_loss=0.05822, over 4747.00 frames.], tot_loss[loss=0.1974, simple_loss=0.2784, pruned_loss=0.05822, over 4747.00 frames.], batch size: 46, lr: 5.35e-04 +2022-05-27 20:31:42,709 INFO [train.py:823] (2/4) Epoch 32, batch 50, loss[loss=0.1478, simple_loss=0.2242, pruned_loss=0.03575, over 7311.00 frames.], tot_loss[loss=0.1669, simple_loss=0.2563, pruned_loss=0.03869, over 319710.22 frames.], batch size: 17, lr: 5.35e-04 +2022-05-27 20:32:21,594 INFO [train.py:823] (2/4) Epoch 32, batch 100, loss[loss=0.1802, simple_loss=0.2696, pruned_loss=0.04536, over 7174.00 frames.], tot_loss[loss=0.1665, simple_loss=0.256, pruned_loss=0.0385, over 565586.01 frames.], batch size: 22, lr: 5.34e-04 +2022-05-27 20:33:00,097 INFO [train.py:823] (2/4) Epoch 32, batch 150, loss[loss=0.1514, simple_loss=0.2489, pruned_loss=0.027, over 7204.00 frames.], tot_loss[loss=0.1663, simple_loss=0.2551, pruned_loss=0.03873, over 757923.54 frames.], batch size: 19, lr: 5.34e-04 +2022-05-27 20:33:39,219 INFO [train.py:823] (2/4) Epoch 32, batch 200, loss[loss=0.1644, simple_loss=0.2573, pruned_loss=0.03577, over 7204.00 frames.], tot_loss[loss=0.1663, simple_loss=0.2555, pruned_loss=0.03851, over 904350.91 frames.], batch size: 19, lr: 5.33e-04 +2022-05-27 20:34:18,219 INFO [train.py:823] (2/4) Epoch 32, batch 250, loss[loss=0.1875, simple_loss=0.2697, pruned_loss=0.05263, over 7197.00 frames.], tot_loss[loss=0.1651, simple_loss=0.2538, pruned_loss=0.03824, over 1021196.72 frames.], batch size: 19, lr: 5.33e-04 +2022-05-27 20:34:57,858 INFO [train.py:823] (2/4) Epoch 32, batch 300, loss[loss=0.1789, simple_loss=0.2565, pruned_loss=0.05061, over 7305.00 frames.], tot_loss[loss=0.1635, simple_loss=0.2521, pruned_loss=0.03744, over 1106577.36 frames.], batch size: 19, lr: 5.32e-04 +2022-05-27 20:35:36,766 INFO [train.py:823] (2/4) Epoch 32, batch 350, 
loss[loss=0.1801, simple_loss=0.2487, pruned_loss=0.05577, over 7003.00 frames.], tot_loss[loss=0.1652, simple_loss=0.2536, pruned_loss=0.03835, over 1175944.63 frames.], batch size: 16, lr: 5.32e-04 +2022-05-27 20:36:16,010 INFO [train.py:823] (2/4) Epoch 32, batch 400, loss[loss=0.1908, simple_loss=0.2774, pruned_loss=0.05212, over 6498.00 frames.], tot_loss[loss=0.1656, simple_loss=0.2546, pruned_loss=0.03828, over 1225498.15 frames.], batch size: 34, lr: 5.32e-04 +2022-05-27 20:36:54,891 INFO [train.py:823] (2/4) Epoch 32, batch 450, loss[loss=0.1855, simple_loss=0.2784, pruned_loss=0.04625, over 7156.00 frames.], tot_loss[loss=0.1652, simple_loss=0.2544, pruned_loss=0.03805, over 1266697.99 frames.], batch size: 23, lr: 5.31e-04 +2022-05-27 20:37:35,399 INFO [train.py:823] (2/4) Epoch 32, batch 500, loss[loss=0.1657, simple_loss=0.259, pruned_loss=0.03623, over 7206.00 frames.], tot_loss[loss=0.1644, simple_loss=0.2536, pruned_loss=0.03759, over 1300322.64 frames.], batch size: 20, lr: 5.31e-04 +2022-05-27 20:38:14,357 INFO [train.py:823] (2/4) Epoch 32, batch 550, loss[loss=0.216, simple_loss=0.3139, pruned_loss=0.05909, over 7219.00 frames.], tot_loss[loss=0.165, simple_loss=0.2546, pruned_loss=0.03774, over 1328636.12 frames.], batch size: 25, lr: 5.30e-04 +2022-05-27 20:38:53,731 INFO [train.py:823] (2/4) Epoch 32, batch 600, loss[loss=0.1469, simple_loss=0.2296, pruned_loss=0.03206, over 7297.00 frames.], tot_loss[loss=0.1657, simple_loss=0.2551, pruned_loss=0.0382, over 1349853.40 frames.], batch size: 17, lr: 5.30e-04 +2022-05-27 20:39:32,596 INFO [train.py:823] (2/4) Epoch 32, batch 650, loss[loss=0.1556, simple_loss=0.2542, pruned_loss=0.0285, over 7030.00 frames.], tot_loss[loss=0.1653, simple_loss=0.2546, pruned_loss=0.03798, over 1361824.58 frames.], batch size: 26, lr: 5.29e-04 +2022-05-27 20:40:11,854 INFO [train.py:823] (2/4) Epoch 32, batch 700, loss[loss=0.1724, simple_loss=0.2631, pruned_loss=0.04088, over 7113.00 frames.], tot_loss[loss=0.1647, simple_loss=0.2538, pruned_loss=0.03781, over 1378118.19 frames.], batch size: 20, lr: 5.29e-04 +2022-05-27 20:40:50,468 INFO [train.py:823] (2/4) Epoch 32, batch 750, loss[loss=0.1442, simple_loss=0.2258, pruned_loss=0.03129, over 7393.00 frames.], tot_loss[loss=0.1643, simple_loss=0.2536, pruned_loss=0.03751, over 1387814.77 frames.], batch size: 19, lr: 5.29e-04 +2022-05-27 20:41:30,226 INFO [train.py:823] (2/4) Epoch 32, batch 800, loss[loss=0.1377, simple_loss=0.2267, pruned_loss=0.02433, over 7155.00 frames.], tot_loss[loss=0.1642, simple_loss=0.2538, pruned_loss=0.03735, over 1396044.36 frames.], batch size: 17, lr: 5.28e-04 +2022-05-27 20:42:10,631 INFO [train.py:823] (2/4) Epoch 32, batch 850, loss[loss=0.1327, simple_loss=0.2211, pruned_loss=0.02217, over 7022.00 frames.], tot_loss[loss=0.1645, simple_loss=0.2536, pruned_loss=0.03764, over 1399519.09 frames.], batch size: 17, lr: 5.28e-04 +2022-05-27 20:42:49,945 INFO [train.py:823] (2/4) Epoch 32, batch 900, loss[loss=0.1628, simple_loss=0.2472, pruned_loss=0.03917, over 7419.00 frames.], tot_loss[loss=0.1645, simple_loss=0.2539, pruned_loss=0.0375, over 1404830.74 frames.], batch size: 18, lr: 5.27e-04 +2022-05-27 20:43:43,934 INFO [train.py:823] (2/4) Epoch 33, batch 0, loss[loss=0.1723, simple_loss=0.2654, pruned_loss=0.03957, over 7027.00 frames.], tot_loss[loss=0.1723, simple_loss=0.2654, pruned_loss=0.03957, over 7027.00 frames.], batch size: 29, lr: 5.19e-04 +2022-05-27 20:44:22,844 INFO [train.py:823] (2/4) Epoch 33, batch 50, loss[loss=0.1515, 
simple_loss=0.238, pruned_loss=0.03249, over 7154.00 frames.], tot_loss[loss=0.1652, simple_loss=0.254, pruned_loss=0.0382, over 317800.88 frames.], batch size: 17, lr: 5.18e-04 +2022-05-27 20:45:02,634 INFO [train.py:823] (2/4) Epoch 33, batch 100, loss[loss=0.1639, simple_loss=0.2403, pruned_loss=0.04371, over 6803.00 frames.], tot_loss[loss=0.1635, simple_loss=0.2525, pruned_loss=0.03719, over 561397.18 frames.], batch size: 15, lr: 5.18e-04 +2022-05-27 20:45:41,727 INFO [train.py:823] (2/4) Epoch 33, batch 150, loss[loss=0.1618, simple_loss=0.2555, pruned_loss=0.03402, over 7188.00 frames.], tot_loss[loss=0.1649, simple_loss=0.2543, pruned_loss=0.03776, over 749988.37 frames.], batch size: 21, lr: 5.18e-04 +2022-05-27 20:46:21,821 INFO [train.py:823] (2/4) Epoch 33, batch 200, loss[loss=0.2036, simple_loss=0.3039, pruned_loss=0.05163, over 7114.00 frames.], tot_loss[loss=0.1646, simple_loss=0.2541, pruned_loss=0.03754, over 892045.34 frames.], batch size: 20, lr: 5.17e-04 +2022-05-27 20:47:00,794 INFO [train.py:823] (2/4) Epoch 33, batch 250, loss[loss=0.1812, simple_loss=0.273, pruned_loss=0.04475, over 7143.00 frames.], tot_loss[loss=0.1648, simple_loss=0.2539, pruned_loss=0.03783, over 1013056.88 frames.], batch size: 23, lr: 5.17e-04 +2022-05-27 20:47:39,831 INFO [train.py:823] (2/4) Epoch 33, batch 300, loss[loss=0.1507, simple_loss=0.23, pruned_loss=0.03565, over 7174.00 frames.], tot_loss[loss=0.1648, simple_loss=0.2539, pruned_loss=0.03782, over 1106910.66 frames.], batch size: 17, lr: 5.16e-04 +2022-05-27 20:48:18,979 INFO [train.py:823] (2/4) Epoch 33, batch 350, loss[loss=0.168, simple_loss=0.2639, pruned_loss=0.0361, over 7351.00 frames.], tot_loss[loss=0.1649, simple_loss=0.2543, pruned_loss=0.03773, over 1176372.66 frames.], batch size: 23, lr: 5.16e-04 +2022-05-27 20:48:57,954 INFO [train.py:823] (2/4) Epoch 33, batch 400, loss[loss=0.1791, simple_loss=0.2734, pruned_loss=0.04244, over 7427.00 frames.], tot_loss[loss=0.1657, simple_loss=0.2552, pruned_loss=0.03811, over 1231260.38 frames.], batch size: 22, lr: 5.16e-04 +2022-05-27 20:49:36,998 INFO [train.py:823] (2/4) Epoch 33, batch 450, loss[loss=0.1314, simple_loss=0.2195, pruned_loss=0.02162, over 7291.00 frames.], tot_loss[loss=0.1653, simple_loss=0.2549, pruned_loss=0.03789, over 1272038.99 frames.], batch size: 19, lr: 5.15e-04 +2022-05-27 20:50:15,612 INFO [train.py:823] (2/4) Epoch 33, batch 500, loss[loss=0.1742, simple_loss=0.2702, pruned_loss=0.03904, over 6941.00 frames.], tot_loss[loss=0.1648, simple_loss=0.2545, pruned_loss=0.03762, over 1306519.08 frames.], batch size: 29, lr: 5.15e-04 +2022-05-27 20:50:54,744 INFO [train.py:823] (2/4) Epoch 33, batch 550, loss[loss=0.1684, simple_loss=0.2514, pruned_loss=0.04268, over 7381.00 frames.], tot_loss[loss=0.1643, simple_loss=0.2539, pruned_loss=0.03736, over 1334440.00 frames.], batch size: 19, lr: 5.14e-04 +2022-05-27 20:51:33,963 INFO [train.py:823] (2/4) Epoch 33, batch 600, loss[loss=0.168, simple_loss=0.2723, pruned_loss=0.03188, over 7419.00 frames.], tot_loss[loss=0.1631, simple_loss=0.2527, pruned_loss=0.03677, over 1353972.87 frames.], batch size: 22, lr: 5.14e-04 +2022-05-27 20:52:12,814 INFO [train.py:823] (2/4) Epoch 33, batch 650, loss[loss=0.1535, simple_loss=0.2378, pruned_loss=0.03456, over 7143.00 frames.], tot_loss[loss=0.1624, simple_loss=0.2518, pruned_loss=0.0365, over 1373077.89 frames.], batch size: 17, lr: 5.14e-04 +2022-05-27 20:52:51,742 INFO [train.py:823] (2/4) Epoch 33, batch 700, loss[loss=0.1731, simple_loss=0.2707, 
pruned_loss=0.03771, over 6404.00 frames.], tot_loss[loss=0.163, simple_loss=0.2523, pruned_loss=0.03678, over 1384293.70 frames.], batch size: 34, lr: 5.13e-04 +2022-05-27 20:53:30,753 INFO [train.py:823] (2/4) Epoch 33, batch 750, loss[loss=0.1576, simple_loss=0.2574, pruned_loss=0.02883, over 7169.00 frames.], tot_loss[loss=0.1636, simple_loss=0.2532, pruned_loss=0.03698, over 1391292.15 frames.], batch size: 25, lr: 5.13e-04 +2022-05-27 20:54:09,382 INFO [train.py:823] (2/4) Epoch 33, batch 800, loss[loss=0.1638, simple_loss=0.2571, pruned_loss=0.03526, over 7157.00 frames.], tot_loss[loss=0.1636, simple_loss=0.254, pruned_loss=0.03662, over 1391776.62 frames.], batch size: 22, lr: 5.12e-04 +2022-05-27 20:54:48,178 INFO [train.py:823] (2/4) Epoch 33, batch 850, loss[loss=0.1358, simple_loss=0.2213, pruned_loss=0.02522, over 7097.00 frames.], tot_loss[loss=0.163, simple_loss=0.2531, pruned_loss=0.03648, over 1400314.66 frames.], batch size: 18, lr: 5.12e-04 +2022-05-27 20:55:26,891 INFO [train.py:823] (2/4) Epoch 33, batch 900, loss[loss=0.1388, simple_loss=0.2229, pruned_loss=0.0273, over 7004.00 frames.], tot_loss[loss=0.1631, simple_loss=0.2529, pruned_loss=0.03662, over 1401693.02 frames.], batch size: 16, lr: 5.12e-04 +2022-05-27 20:56:18,080 INFO [train.py:823] (2/4) Epoch 34, batch 0, loss[loss=0.1764, simple_loss=0.2608, pruned_loss=0.04601, over 7252.00 frames.], tot_loss[loss=0.1764, simple_loss=0.2608, pruned_loss=0.04601, over 7252.00 frames.], batch size: 24, lr: 5.04e-04 +2022-05-27 20:56:56,732 INFO [train.py:823] (2/4) Epoch 34, batch 50, loss[loss=0.1298, simple_loss=0.2161, pruned_loss=0.02173, over 6781.00 frames.], tot_loss[loss=0.1643, simple_loss=0.2526, pruned_loss=0.03804, over 319856.55 frames.], batch size: 15, lr: 5.03e-04 +2022-05-27 20:57:36,511 INFO [train.py:823] (2/4) Epoch 34, batch 100, loss[loss=0.1527, simple_loss=0.2428, pruned_loss=0.03137, over 7282.00 frames.], tot_loss[loss=0.1622, simple_loss=0.2512, pruned_loss=0.03658, over 560131.09 frames.], batch size: 21, lr: 5.03e-04 +2022-05-27 20:58:15,740 INFO [train.py:823] (2/4) Epoch 34, batch 150, loss[loss=0.1578, simple_loss=0.2628, pruned_loss=0.02638, over 7298.00 frames.], tot_loss[loss=0.1636, simple_loss=0.2535, pruned_loss=0.03684, over 754299.95 frames.], batch size: 22, lr: 5.02e-04 +2022-05-27 20:58:54,856 INFO [train.py:823] (2/4) Epoch 34, batch 200, loss[loss=0.1763, simple_loss=0.26, pruned_loss=0.04629, over 7020.00 frames.], tot_loss[loss=0.1635, simple_loss=0.2534, pruned_loss=0.03683, over 902430.22 frames.], batch size: 26, lr: 5.02e-04 +2022-05-27 20:59:34,246 INFO [train.py:823] (2/4) Epoch 34, batch 250, loss[loss=0.1691, simple_loss=0.2544, pruned_loss=0.04187, over 7054.00 frames.], tot_loss[loss=0.1634, simple_loss=0.2529, pruned_loss=0.03697, over 1013916.76 frames.], batch size: 26, lr: 5.02e-04 +2022-05-27 21:00:13,170 INFO [train.py:823] (2/4) Epoch 34, batch 300, loss[loss=0.1806, simple_loss=0.2785, pruned_loss=0.0413, over 7376.00 frames.], tot_loss[loss=0.1624, simple_loss=0.252, pruned_loss=0.03644, over 1102846.28 frames.], batch size: 21, lr: 5.01e-04 +2022-05-27 21:00:53,313 INFO [train.py:823] (2/4) Epoch 34, batch 350, loss[loss=0.1496, simple_loss=0.2342, pruned_loss=0.03247, over 7098.00 frames.], tot_loss[loss=0.1627, simple_loss=0.2518, pruned_loss=0.03673, over 1169206.36 frames.], batch size: 19, lr: 5.01e-04 +2022-05-27 21:01:32,697 INFO [train.py:823] (2/4) Epoch 34, batch 400, loss[loss=0.1649, simple_loss=0.2664, pruned_loss=0.0317, over 7284.00 
frames.], tot_loss[loss=0.1627, simple_loss=0.2522, pruned_loss=0.03665, over 1223984.83 frames.], batch size: 21, lr: 5.00e-04 +2022-05-27 21:02:11,951 INFO [train.py:823] (2/4) Epoch 34, batch 450, loss[loss=0.1859, simple_loss=0.2878, pruned_loss=0.04198, over 7280.00 frames.], tot_loss[loss=0.1631, simple_loss=0.2528, pruned_loss=0.03672, over 1269420.31 frames.], batch size: 20, lr: 5.00e-04 +2022-05-27 21:02:51,512 INFO [train.py:823] (2/4) Epoch 34, batch 500, loss[loss=0.1866, simple_loss=0.2787, pruned_loss=0.04722, over 7131.00 frames.], tot_loss[loss=0.1626, simple_loss=0.2521, pruned_loss=0.03659, over 1303131.46 frames.], batch size: 23, lr: 5.00e-04 +2022-05-27 21:03:31,215 INFO [train.py:823] (2/4) Epoch 34, batch 550, loss[loss=0.1711, simple_loss=0.2688, pruned_loss=0.03672, over 7213.00 frames.], tot_loss[loss=0.1623, simple_loss=0.2518, pruned_loss=0.0364, over 1335288.50 frames.], batch size: 25, lr: 4.99e-04 +2022-05-27 21:04:10,513 INFO [train.py:823] (2/4) Epoch 34, batch 600, loss[loss=0.1468, simple_loss=0.2279, pruned_loss=0.03282, over 7273.00 frames.], tot_loss[loss=0.1626, simple_loss=0.2524, pruned_loss=0.03647, over 1353438.94 frames.], batch size: 17, lr: 4.99e-04 +2022-05-27 21:04:51,416 INFO [train.py:823] (2/4) Epoch 34, batch 650, loss[loss=0.1591, simple_loss=0.2495, pruned_loss=0.03433, over 7042.00 frames.], tot_loss[loss=0.1619, simple_loss=0.2512, pruned_loss=0.03627, over 1367942.50 frames.], batch size: 29, lr: 4.99e-04 +2022-05-27 21:05:35,723 INFO [train.py:823] (2/4) Epoch 34, batch 700, loss[loss=0.1605, simple_loss=0.2485, pruned_loss=0.03631, over 7380.00 frames.], tot_loss[loss=0.1622, simple_loss=0.2513, pruned_loss=0.0366, over 1377475.78 frames.], batch size: 20, lr: 4.98e-04 +2022-05-27 21:06:14,444 INFO [train.py:823] (2/4) Epoch 34, batch 750, loss[loss=0.1331, simple_loss=0.2228, pruned_loss=0.02175, over 7013.00 frames.], tot_loss[loss=0.1626, simple_loss=0.2515, pruned_loss=0.03684, over 1388893.32 frames.], batch size: 16, lr: 4.98e-04 +2022-05-27 21:06:53,511 INFO [train.py:823] (2/4) Epoch 34, batch 800, loss[loss=0.1591, simple_loss=0.263, pruned_loss=0.02765, over 7199.00 frames.], tot_loss[loss=0.1627, simple_loss=0.2516, pruned_loss=0.03686, over 1396389.98 frames.], batch size: 19, lr: 4.97e-04 +2022-05-27 21:07:32,245 INFO [train.py:823] (2/4) Epoch 34, batch 850, loss[loss=0.1624, simple_loss=0.2525, pruned_loss=0.03613, over 7373.00 frames.], tot_loss[loss=0.1629, simple_loss=0.2522, pruned_loss=0.03684, over 1396566.00 frames.], batch size: 21, lr: 4.97e-04 +2022-05-27 21:08:13,033 INFO [train.py:823] (2/4) Epoch 34, batch 900, loss[loss=0.1484, simple_loss=0.2378, pruned_loss=0.02955, over 7106.00 frames.], tot_loss[loss=0.1625, simple_loss=0.2515, pruned_loss=0.03675, over 1400616.57 frames.], batch size: 18, lr: 4.97e-04 +2022-05-27 21:09:07,182 INFO [train.py:823] (2/4) Epoch 35, batch 0, loss[loss=0.1908, simple_loss=0.2851, pruned_loss=0.04831, over 7179.00 frames.], tot_loss[loss=0.1908, simple_loss=0.2851, pruned_loss=0.04831, over 7179.00 frames.], batch size: 21, lr: 4.89e-04 +2022-05-27 21:09:48,046 INFO [train.py:823] (2/4) Epoch 35, batch 50, loss[loss=0.1519, simple_loss=0.2365, pruned_loss=0.03364, over 7178.00 frames.], tot_loss[loss=0.1649, simple_loss=0.2557, pruned_loss=0.03707, over 323745.05 frames.], batch size: 18, lr: 4.89e-04 +2022-05-27 21:10:26,967 INFO [train.py:823] (2/4) Epoch 35, batch 100, loss[loss=0.1648, simple_loss=0.258, pruned_loss=0.03581, over 6594.00 frames.], 
tot_loss[loss=0.1656, simple_loss=0.2552, pruned_loss=0.03794, over 568697.19 frames.], batch size: 34, lr: 4.88e-04 +2022-05-27 21:11:06,272 INFO [train.py:823] (2/4) Epoch 35, batch 150, loss[loss=0.1811, simple_loss=0.275, pruned_loss=0.04361, over 7229.00 frames.], tot_loss[loss=0.1641, simple_loss=0.2538, pruned_loss=0.03717, over 755105.33 frames.], batch size: 25, lr: 4.88e-04 +2022-05-27 21:11:45,043 INFO [train.py:823] (2/4) Epoch 35, batch 200, loss[loss=0.1547, simple_loss=0.2498, pruned_loss=0.02982, over 6989.00 frames.], tot_loss[loss=0.1623, simple_loss=0.2522, pruned_loss=0.03625, over 904358.35 frames.], batch size: 29, lr: 4.88e-04 +2022-05-27 21:12:24,213 INFO [train.py:823] (2/4) Epoch 35, batch 250, loss[loss=0.1967, simple_loss=0.2805, pruned_loss=0.05647, over 7231.00 frames.], tot_loss[loss=0.1628, simple_loss=0.2523, pruned_loss=0.03667, over 1013782.06 frames.], batch size: 24, lr: 4.87e-04 +2022-05-27 21:13:03,265 INFO [train.py:823] (2/4) Epoch 35, batch 300, loss[loss=0.1621, simple_loss=0.2627, pruned_loss=0.03079, over 7286.00 frames.], tot_loss[loss=0.162, simple_loss=0.2515, pruned_loss=0.03625, over 1106242.18 frames.], batch size: 21, lr: 4.87e-04 +2022-05-27 21:13:42,501 INFO [train.py:823] (2/4) Epoch 35, batch 350, loss[loss=0.1536, simple_loss=0.2358, pruned_loss=0.03564, over 7089.00 frames.], tot_loss[loss=0.1627, simple_loss=0.2523, pruned_loss=0.03657, over 1172770.52 frames.], batch size: 18, lr: 4.87e-04 +2022-05-27 21:14:21,229 INFO [train.py:823] (2/4) Epoch 35, batch 400, loss[loss=0.1883, simple_loss=0.277, pruned_loss=0.04977, over 7184.00 frames.], tot_loss[loss=0.1627, simple_loss=0.252, pruned_loss=0.03667, over 1222872.20 frames.], batch size: 22, lr: 4.86e-04 +2022-05-27 21:15:00,093 INFO [train.py:823] (2/4) Epoch 35, batch 450, loss[loss=0.1484, simple_loss=0.2337, pruned_loss=0.03157, over 7293.00 frames.], tot_loss[loss=0.1619, simple_loss=0.2514, pruned_loss=0.03621, over 1270596.57 frames.], batch size: 17, lr: 4.86e-04 +2022-05-27 21:15:38,774 INFO [train.py:823] (2/4) Epoch 35, batch 500, loss[loss=0.1644, simple_loss=0.2424, pruned_loss=0.04322, over 7030.00 frames.], tot_loss[loss=0.1611, simple_loss=0.2509, pruned_loss=0.03567, over 1305484.56 frames.], batch size: 17, lr: 4.86e-04 +2022-05-27 21:16:17,748 INFO [train.py:823] (2/4) Epoch 35, batch 550, loss[loss=0.1445, simple_loss=0.2291, pruned_loss=0.03001, over 7014.00 frames.], tot_loss[loss=0.1601, simple_loss=0.2497, pruned_loss=0.03523, over 1328602.66 frames.], batch size: 17, lr: 4.85e-04 +2022-05-27 21:16:57,058 INFO [train.py:823] (2/4) Epoch 35, batch 600, loss[loss=0.1805, simple_loss=0.2602, pruned_loss=0.05038, over 7290.00 frames.], tot_loss[loss=0.1599, simple_loss=0.2496, pruned_loss=0.03513, over 1350042.42 frames.], batch size: 20, lr: 4.85e-04 +2022-05-27 21:17:36,318 INFO [train.py:823] (2/4) Epoch 35, batch 650, loss[loss=0.1703, simple_loss=0.2652, pruned_loss=0.03767, over 7056.00 frames.], tot_loss[loss=0.1598, simple_loss=0.2489, pruned_loss=0.03538, over 1368554.78 frames.], batch size: 26, lr: 4.84e-04 +2022-05-27 21:18:15,572 INFO [train.py:823] (2/4) Epoch 35, batch 700, loss[loss=0.1602, simple_loss=0.2482, pruned_loss=0.03617, over 7275.00 frames.], tot_loss[loss=0.1594, simple_loss=0.2483, pruned_loss=0.03527, over 1378338.09 frames.], batch size: 20, lr: 4.84e-04 +2022-05-27 21:18:54,741 INFO [train.py:823] (2/4) Epoch 35, batch 750, loss[loss=0.1609, simple_loss=0.2546, pruned_loss=0.03361, over 7101.00 frames.], tot_loss[loss=0.1612, 
simple_loss=0.2503, pruned_loss=0.0361, over 1390874.68 frames.], batch size: 19, lr: 4.84e-04 +2022-05-27 21:19:33,001 INFO [train.py:823] (2/4) Epoch 35, batch 800, loss[loss=0.1593, simple_loss=0.2521, pruned_loss=0.03322, over 7309.00 frames.], tot_loss[loss=0.1612, simple_loss=0.2506, pruned_loss=0.03588, over 1394914.47 frames.], batch size: 18, lr: 4.83e-04 +2022-05-27 21:20:12,115 INFO [train.py:823] (2/4) Epoch 35, batch 850, loss[loss=0.159, simple_loss=0.2559, pruned_loss=0.03105, over 7419.00 frames.], tot_loss[loss=0.1613, simple_loss=0.2509, pruned_loss=0.0358, over 1404129.12 frames.], batch size: 22, lr: 4.83e-04 +2022-05-27 21:20:50,657 INFO [train.py:823] (2/4) Epoch 35, batch 900, loss[loss=0.1646, simple_loss=0.2603, pruned_loss=0.03446, over 6451.00 frames.], tot_loss[loss=0.1608, simple_loss=0.2504, pruned_loss=0.03562, over 1401226.39 frames.], batch size: 34, lr: 4.83e-04 +2022-05-27 21:21:29,571 INFO [train.py:823] (2/4) Epoch 35, batch 950, loss[loss=0.1506, simple_loss=0.2408, pruned_loss=0.03024, over 4808.00 frames.], tot_loss[loss=0.1623, simple_loss=0.2517, pruned_loss=0.03643, over 1381022.19 frames.], batch size: 48, lr: 4.82e-04 +2022-05-27 21:21:42,947 INFO [train.py:823] (2/4) Epoch 36, batch 0, loss[loss=0.1907, simple_loss=0.2833, pruned_loss=0.04907, over 7424.00 frames.], tot_loss[loss=0.1907, simple_loss=0.2833, pruned_loss=0.04907, over 7424.00 frames.], batch size: 22, lr: 4.76e-04 +2022-05-27 21:22:22,343 INFO [train.py:823] (2/4) Epoch 36, batch 50, loss[loss=0.1194, simple_loss=0.2074, pruned_loss=0.0157, over 7145.00 frames.], tot_loss[loss=0.1563, simple_loss=0.2447, pruned_loss=0.03393, over 320049.70 frames.], batch size: 17, lr: 4.75e-04 +2022-05-27 21:23:01,951 INFO [train.py:823] (2/4) Epoch 36, batch 100, loss[loss=0.1831, simple_loss=0.2744, pruned_loss=0.04595, over 6365.00 frames.], tot_loss[loss=0.1571, simple_loss=0.2466, pruned_loss=0.03382, over 565179.13 frames.], batch size: 34, lr: 4.75e-04 +2022-05-27 21:23:40,584 INFO [train.py:823] (2/4) Epoch 36, batch 150, loss[loss=0.1851, simple_loss=0.2777, pruned_loss=0.04623, over 7215.00 frames.], tot_loss[loss=0.1605, simple_loss=0.25, pruned_loss=0.03551, over 752641.99 frames.], batch size: 25, lr: 4.74e-04 +2022-05-27 21:24:21,482 INFO [train.py:823] (2/4) Epoch 36, batch 200, loss[loss=0.1683, simple_loss=0.237, pruned_loss=0.04977, over 7304.00 frames.], tot_loss[loss=0.161, simple_loss=0.2502, pruned_loss=0.03587, over 900012.87 frames.], batch size: 17, lr: 4.74e-04 +2022-05-27 21:25:00,057 INFO [train.py:823] (2/4) Epoch 36, batch 250, loss[loss=0.1501, simple_loss=0.2382, pruned_loss=0.03103, over 7403.00 frames.], tot_loss[loss=0.1607, simple_loss=0.2501, pruned_loss=0.03567, over 1013133.63 frames.], batch size: 19, lr: 4.74e-04 +2022-05-27 21:25:39,348 INFO [train.py:823] (2/4) Epoch 36, batch 300, loss[loss=0.1669, simple_loss=0.2618, pruned_loss=0.03601, over 7329.00 frames.], tot_loss[loss=0.1604, simple_loss=0.2496, pruned_loss=0.0356, over 1101637.31 frames.], batch size: 23, lr: 4.73e-04 +2022-05-27 21:26:19,006 INFO [train.py:823] (2/4) Epoch 36, batch 350, loss[loss=0.1512, simple_loss=0.2474, pruned_loss=0.02753, over 7369.00 frames.], tot_loss[loss=0.1601, simple_loss=0.2498, pruned_loss=0.03522, over 1171846.20 frames.], batch size: 20, lr: 4.73e-04 +2022-05-27 21:26:58,368 INFO [train.py:823] (2/4) Epoch 36, batch 400, loss[loss=0.1698, simple_loss=0.2537, pruned_loss=0.04299, over 7105.00 frames.], tot_loss[loss=0.1608, simple_loss=0.2504, 
pruned_loss=0.03556, over 1227679.88 frames.], batch size: 18, lr: 4.73e-04 +2022-05-27 21:27:39,342 INFO [train.py:823] (2/4) Epoch 36, batch 450, loss[loss=0.1553, simple_loss=0.2439, pruned_loss=0.03328, over 6984.00 frames.], tot_loss[loss=0.1606, simple_loss=0.25, pruned_loss=0.03558, over 1269783.32 frames.], batch size: 26, lr: 4.72e-04 +2022-05-27 21:28:18,473 INFO [train.py:823] (2/4) Epoch 36, batch 500, loss[loss=0.1732, simple_loss=0.2711, pruned_loss=0.03759, over 7244.00 frames.], tot_loss[loss=0.1608, simple_loss=0.2499, pruned_loss=0.03579, over 1300404.83 frames.], batch size: 24, lr: 4.72e-04 +2022-05-27 21:28:57,562 INFO [train.py:823] (2/4) Epoch 36, batch 550, loss[loss=0.1298, simple_loss=0.2047, pruned_loss=0.02751, over 7296.00 frames.], tot_loss[loss=0.1606, simple_loss=0.2497, pruned_loss=0.0357, over 1327396.06 frames.], batch size: 17, lr: 4.72e-04 +2022-05-27 21:29:37,087 INFO [train.py:823] (2/4) Epoch 36, batch 600, loss[loss=0.1443, simple_loss=0.2255, pruned_loss=0.03155, over 7290.00 frames.], tot_loss[loss=0.1605, simple_loss=0.2498, pruned_loss=0.03565, over 1346061.80 frames.], batch size: 17, lr: 4.71e-04 +2022-05-27 21:30:16,487 INFO [train.py:823] (2/4) Epoch 36, batch 650, loss[loss=0.1567, simple_loss=0.2459, pruned_loss=0.03372, over 7378.00 frames.], tot_loss[loss=0.1614, simple_loss=0.2511, pruned_loss=0.0359, over 1361750.57 frames.], batch size: 21, lr: 4.71e-04 +2022-05-27 21:30:56,838 INFO [train.py:823] (2/4) Epoch 36, batch 700, loss[loss=0.1285, simple_loss=0.2123, pruned_loss=0.02235, over 7299.00 frames.], tot_loss[loss=0.1608, simple_loss=0.2506, pruned_loss=0.03552, over 1378029.56 frames.], batch size: 17, lr: 4.71e-04 +2022-05-27 21:31:36,046 INFO [train.py:823] (2/4) Epoch 36, batch 750, loss[loss=0.1684, simple_loss=0.2569, pruned_loss=0.03993, over 7288.00 frames.], tot_loss[loss=0.1602, simple_loss=0.2497, pruned_loss=0.03533, over 1387583.09 frames.], batch size: 21, lr: 4.70e-04 +2022-05-27 21:32:16,411 INFO [train.py:823] (2/4) Epoch 36, batch 800, loss[loss=0.1569, simple_loss=0.2527, pruned_loss=0.03048, over 7380.00 frames.], tot_loss[loss=0.1607, simple_loss=0.2504, pruned_loss=0.03545, over 1387506.20 frames.], batch size: 21, lr: 4.70e-04 +2022-05-27 21:32:55,461 INFO [train.py:823] (2/4) Epoch 36, batch 850, loss[loss=0.1967, simple_loss=0.2891, pruned_loss=0.0521, over 7334.00 frames.], tot_loss[loss=0.1608, simple_loss=0.2501, pruned_loss=0.03574, over 1388168.41 frames.], batch size: 23, lr: 4.70e-04 +2022-05-27 21:33:34,453 INFO [train.py:823] (2/4) Epoch 36, batch 900, loss[loss=0.1571, simple_loss=0.2478, pruned_loss=0.0332, over 7431.00 frames.], tot_loss[loss=0.1606, simple_loss=0.2504, pruned_loss=0.03534, over 1396421.86 frames.], batch size: 22, lr: 4.69e-04 +2022-05-27 21:34:27,396 INFO [train.py:823] (2/4) Epoch 37, batch 0, loss[loss=0.17, simple_loss=0.2748, pruned_loss=0.03256, over 6718.00 frames.], tot_loss[loss=0.17, simple_loss=0.2748, pruned_loss=0.03256, over 6718.00 frames.], batch size: 34, lr: 4.63e-04 +2022-05-27 21:35:06,561 INFO [train.py:823] (2/4) Epoch 37, batch 50, loss[loss=0.1761, simple_loss=0.2719, pruned_loss=0.04012, over 7291.00 frames.], tot_loss[loss=0.1606, simple_loss=0.2514, pruned_loss=0.03488, over 318971.96 frames.], batch size: 22, lr: 4.62e-04 +2022-05-27 21:35:45,358 INFO [train.py:823] (2/4) Epoch 37, batch 100, loss[loss=0.183, simple_loss=0.2704, pruned_loss=0.04781, over 7224.00 frames.], tot_loss[loss=0.161, simple_loss=0.2513, pruned_loss=0.03536, over 561796.22 
frames.], batch size: 24, lr: 4.62e-04 +2022-05-27 21:36:24,662 INFO [train.py:823] (2/4) Epoch 37, batch 150, loss[loss=0.1624, simple_loss=0.256, pruned_loss=0.0344, over 7187.00 frames.], tot_loss[loss=0.1598, simple_loss=0.2496, pruned_loss=0.03503, over 750371.97 frames.], batch size: 21, lr: 4.62e-04 +2022-05-27 21:37:04,077 INFO [train.py:823] (2/4) Epoch 37, batch 200, loss[loss=0.1967, simple_loss=0.2956, pruned_loss=0.04892, over 7236.00 frames.], tot_loss[loss=0.1588, simple_loss=0.2479, pruned_loss=0.03482, over 903903.54 frames.], batch size: 24, lr: 4.61e-04 +2022-05-27 21:37:43,505 INFO [train.py:823] (2/4) Epoch 37, batch 250, loss[loss=0.1596, simple_loss=0.2495, pruned_loss=0.03483, over 6980.00 frames.], tot_loss[loss=0.1584, simple_loss=0.2481, pruned_loss=0.03432, over 1020638.91 frames.], batch size: 26, lr: 4.61e-04 +2022-05-27 21:38:22,816 INFO [train.py:823] (2/4) Epoch 37, batch 300, loss[loss=0.1291, simple_loss=0.2154, pruned_loss=0.02143, over 7025.00 frames.], tot_loss[loss=0.1585, simple_loss=0.2475, pruned_loss=0.03479, over 1107430.67 frames.], batch size: 16, lr: 4.61e-04 +2022-05-27 21:39:02,492 INFO [train.py:823] (2/4) Epoch 37, batch 350, loss[loss=0.1811, simple_loss=0.2702, pruned_loss=0.046, over 7165.00 frames.], tot_loss[loss=0.159, simple_loss=0.2483, pruned_loss=0.03479, over 1174550.36 frames.], batch size: 25, lr: 4.60e-04 +2022-05-27 21:39:41,218 INFO [train.py:823] (2/4) Epoch 37, batch 400, loss[loss=0.1542, simple_loss=0.2307, pruned_loss=0.03881, over 7288.00 frames.], tot_loss[loss=0.1604, simple_loss=0.25, pruned_loss=0.03543, over 1230031.48 frames.], batch size: 17, lr: 4.60e-04 +2022-05-27 21:40:19,850 INFO [train.py:823] (2/4) Epoch 37, batch 450, loss[loss=0.1771, simple_loss=0.2625, pruned_loss=0.04584, over 7197.00 frames.], tot_loss[loss=0.1613, simple_loss=0.2509, pruned_loss=0.0358, over 1268980.94 frames.], batch size: 19, lr: 4.60e-04 +2022-05-27 21:40:58,958 INFO [train.py:823] (2/4) Epoch 37, batch 500, loss[loss=0.1605, simple_loss=0.2465, pruned_loss=0.03723, over 7020.00 frames.], tot_loss[loss=0.1605, simple_loss=0.2506, pruned_loss=0.0352, over 1304055.42 frames.], batch size: 16, lr: 4.59e-04 +2022-05-27 21:41:38,542 INFO [train.py:823] (2/4) Epoch 37, batch 550, loss[loss=0.1431, simple_loss=0.2284, pruned_loss=0.02888, over 7004.00 frames.], tot_loss[loss=0.16, simple_loss=0.25, pruned_loss=0.035, over 1330236.33 frames.], batch size: 16, lr: 4.59e-04 +2022-05-27 21:42:17,422 INFO [train.py:823] (2/4) Epoch 37, batch 600, loss[loss=0.1484, simple_loss=0.2405, pruned_loss=0.02817, over 7338.00 frames.], tot_loss[loss=0.1605, simple_loss=0.2507, pruned_loss=0.03515, over 1349479.13 frames.], batch size: 23, lr: 4.59e-04 +2022-05-27 21:42:55,854 INFO [train.py:823] (2/4) Epoch 37, batch 650, loss[loss=0.1486, simple_loss=0.2363, pruned_loss=0.0304, over 7150.00 frames.], tot_loss[loss=0.1597, simple_loss=0.2498, pruned_loss=0.03484, over 1364863.46 frames.], batch size: 17, lr: 4.58e-04 +2022-05-27 21:43:34,882 INFO [train.py:823] (2/4) Epoch 37, batch 700, loss[loss=0.1642, simple_loss=0.2597, pruned_loss=0.03437, over 7423.00 frames.], tot_loss[loss=0.1607, simple_loss=0.2509, pruned_loss=0.0353, over 1373127.31 frames.], batch size: 22, lr: 4.58e-04 +2022-05-27 21:44:14,140 INFO [train.py:823] (2/4) Epoch 37, batch 750, loss[loss=0.1615, simple_loss=0.258, pruned_loss=0.03257, over 4870.00 frames.], tot_loss[loss=0.1606, simple_loss=0.2506, pruned_loss=0.03526, over 1379619.74 frames.], batch size: 46, lr: 4.58e-04 
+2022-05-27 21:44:53,038 INFO [train.py:823] (2/4) Epoch 37, batch 800, loss[loss=0.1754, simple_loss=0.2766, pruned_loss=0.03712, over 7285.00 frames.], tot_loss[loss=0.1602, simple_loss=0.2504, pruned_loss=0.03504, over 1383581.97 frames.], batch size: 21, lr: 4.57e-04 +2022-05-27 21:45:31,929 INFO [train.py:823] (2/4) Epoch 37, batch 850, loss[loss=0.1373, simple_loss=0.2246, pruned_loss=0.02503, over 6778.00 frames.], tot_loss[loss=0.1599, simple_loss=0.2498, pruned_loss=0.03497, over 1384467.32 frames.], batch size: 15, lr: 4.57e-04 +2022-05-27 21:46:10,788 INFO [train.py:823] (2/4) Epoch 37, batch 900, loss[loss=0.2112, simple_loss=0.303, pruned_loss=0.05972, over 7145.00 frames.], tot_loss[loss=0.1593, simple_loss=0.2491, pruned_loss=0.03477, over 1391651.78 frames.], batch size: 23, lr: 4.57e-04 +2022-05-27 21:47:05,039 INFO [train.py:823] (2/4) Epoch 38, batch 0, loss[loss=0.1473, simple_loss=0.2362, pruned_loss=0.02914, over 7395.00 frames.], tot_loss[loss=0.1473, simple_loss=0.2362, pruned_loss=0.02914, over 7395.00 frames.], batch size: 19, lr: 4.50e-04 +2022-05-27 21:47:44,047 INFO [train.py:823] (2/4) Epoch 38, batch 50, loss[loss=0.1659, simple_loss=0.2597, pruned_loss=0.03608, over 7107.00 frames.], tot_loss[loss=0.159, simple_loss=0.2488, pruned_loss=0.03463, over 322309.85 frames.], batch size: 19, lr: 4.50e-04 +2022-05-27 21:48:24,797 INFO [train.py:823] (2/4) Epoch 38, batch 100, loss[loss=0.1709, simple_loss=0.2617, pruned_loss=0.04008, over 7322.00 frames.], tot_loss[loss=0.1594, simple_loss=0.249, pruned_loss=0.03494, over 565523.05 frames.], batch size: 23, lr: 4.50e-04 +2022-05-27 21:49:03,866 INFO [train.py:823] (2/4) Epoch 38, batch 150, loss[loss=0.1711, simple_loss=0.277, pruned_loss=0.03263, over 7038.00 frames.], tot_loss[loss=0.159, simple_loss=0.249, pruned_loss=0.03448, over 754823.83 frames.], batch size: 26, lr: 4.50e-04 +2022-05-27 21:49:43,248 INFO [train.py:823] (2/4) Epoch 38, batch 200, loss[loss=0.1669, simple_loss=0.2688, pruned_loss=0.03248, over 6791.00 frames.], tot_loss[loss=0.1587, simple_loss=0.2492, pruned_loss=0.03415, over 903249.32 frames.], batch size: 34, lr: 4.49e-04 +2022-05-27 21:50:22,187 INFO [train.py:823] (2/4) Epoch 38, batch 250, loss[loss=0.1556, simple_loss=0.2511, pruned_loss=0.03002, over 7100.00 frames.], tot_loss[loss=0.1582, simple_loss=0.2482, pruned_loss=0.03411, over 1022458.61 frames.], batch size: 20, lr: 4.49e-04 +2022-05-27 21:51:03,059 INFO [train.py:823] (2/4) Epoch 38, batch 300, loss[loss=0.178, simple_loss=0.2625, pruned_loss=0.04679, over 7283.00 frames.], tot_loss[loss=0.1592, simple_loss=0.249, pruned_loss=0.03464, over 1108120.62 frames.], batch size: 21, lr: 4.49e-04 +2022-05-27 21:51:42,039 INFO [train.py:823] (2/4) Epoch 38, batch 350, loss[loss=0.1369, simple_loss=0.2216, pruned_loss=0.02611, over 6837.00 frames.], tot_loss[loss=0.1597, simple_loss=0.2497, pruned_loss=0.03481, over 1182476.41 frames.], batch size: 15, lr: 4.48e-04 +2022-05-27 21:52:21,221 INFO [train.py:823] (2/4) Epoch 38, batch 400, loss[loss=0.1797, simple_loss=0.2663, pruned_loss=0.04659, over 5132.00 frames.], tot_loss[loss=0.1594, simple_loss=0.2495, pruned_loss=0.03462, over 1235830.53 frames.], batch size: 46, lr: 4.48e-04 +2022-05-27 21:53:00,087 INFO [train.py:823] (2/4) Epoch 38, batch 450, loss[loss=0.1567, simple_loss=0.2553, pruned_loss=0.02908, over 7194.00 frames.], tot_loss[loss=0.1588, simple_loss=0.2486, pruned_loss=0.0345, over 1280930.98 frames.], batch size: 20, lr: 4.48e-04 +2022-05-27 21:53:39,601 INFO 
[train.py:823] (2/4) Epoch 38, batch 500, loss[loss=0.2062, simple_loss=0.3036, pruned_loss=0.05444, over 7275.00 frames.], tot_loss[loss=0.1582, simple_loss=0.2475, pruned_loss=0.03443, over 1314843.67 frames.], batch size: 21, lr: 4.47e-04 +2022-05-27 21:54:19,922 INFO [train.py:823] (2/4) Epoch 38, batch 550, loss[loss=0.1658, simple_loss=0.2522, pruned_loss=0.03968, over 7199.00 frames.], tot_loss[loss=0.1581, simple_loss=0.2477, pruned_loss=0.03424, over 1332558.80 frames.], batch size: 20, lr: 4.47e-04 +2022-05-27 21:54:59,391 INFO [train.py:823] (2/4) Epoch 38, batch 600, loss[loss=0.1631, simple_loss=0.2548, pruned_loss=0.03571, over 6492.00 frames.], tot_loss[loss=0.1592, simple_loss=0.2488, pruned_loss=0.03486, over 1350199.23 frames.], batch size: 34, lr: 4.47e-04 +2022-05-27 21:55:39,841 INFO [train.py:823] (2/4) Epoch 38, batch 650, loss[loss=0.1507, simple_loss=0.2526, pruned_loss=0.02436, over 7268.00 frames.], tot_loss[loss=0.1593, simple_loss=0.2493, pruned_loss=0.0347, over 1366728.10 frames.], batch size: 20, lr: 4.46e-04 +2022-05-27 21:56:18,895 INFO [train.py:823] (2/4) Epoch 38, batch 700, loss[loss=0.1753, simple_loss=0.263, pruned_loss=0.04374, over 7157.00 frames.], tot_loss[loss=0.1598, simple_loss=0.2499, pruned_loss=0.03491, over 1377520.14 frames.], batch size: 22, lr: 4.46e-04 +2022-05-27 21:56:57,027 INFO [train.py:823] (2/4) Epoch 38, batch 750, loss[loss=0.1854, simple_loss=0.2741, pruned_loss=0.04833, over 7239.00 frames.], tot_loss[loss=0.1597, simple_loss=0.2499, pruned_loss=0.03474, over 1383952.48 frames.], batch size: 24, lr: 4.46e-04 +2022-05-27 21:57:36,094 INFO [train.py:823] (2/4) Epoch 38, batch 800, loss[loss=0.1639, simple_loss=0.2606, pruned_loss=0.03354, over 7371.00 frames.], tot_loss[loss=0.1597, simple_loss=0.2497, pruned_loss=0.0348, over 1386620.75 frames.], batch size: 21, lr: 4.45e-04 +2022-05-27 21:58:15,022 INFO [train.py:823] (2/4) Epoch 38, batch 850, loss[loss=0.2104, simple_loss=0.3021, pruned_loss=0.05934, over 7019.00 frames.], tot_loss[loss=0.1602, simple_loss=0.2502, pruned_loss=0.03513, over 1395745.73 frames.], batch size: 29, lr: 4.45e-04 +2022-05-27 21:58:54,469 INFO [train.py:823] (2/4) Epoch 38, batch 900, loss[loss=0.1514, simple_loss=0.2407, pruned_loss=0.03101, over 6990.00 frames.], tot_loss[loss=0.1603, simple_loss=0.2504, pruned_loss=0.03509, over 1399870.35 frames.], batch size: 16, lr: 4.45e-04 +2022-05-27 21:59:32,700 INFO [train.py:823] (2/4) Epoch 38, batch 950, loss[loss=0.1567, simple_loss=0.2434, pruned_loss=0.03503, over 4917.00 frames.], tot_loss[loss=0.1598, simple_loss=0.2497, pruned_loss=0.03499, over 1374581.31 frames.], batch size: 46, lr: 4.45e-04 +2022-05-27 21:59:46,007 INFO [train.py:823] (2/4) Epoch 39, batch 0, loss[loss=0.1575, simple_loss=0.2484, pruned_loss=0.03331, over 7286.00 frames.], tot_loss[loss=0.1575, simple_loss=0.2484, pruned_loss=0.03331, over 7286.00 frames.], batch size: 19, lr: 4.39e-04 +2022-05-27 22:00:25,304 INFO [train.py:823] (2/4) Epoch 39, batch 50, loss[loss=0.1538, simple_loss=0.2377, pruned_loss=0.03497, over 7414.00 frames.], tot_loss[loss=0.159, simple_loss=0.2488, pruned_loss=0.03466, over 322519.79 frames.], batch size: 22, lr: 4.39e-04 +2022-05-27 22:01:04,278 INFO [train.py:823] (2/4) Epoch 39, batch 100, loss[loss=0.1625, simple_loss=0.242, pruned_loss=0.04152, over 7307.00 frames.], tot_loss[loss=0.1576, simple_loss=0.2468, pruned_loss=0.03418, over 567100.62 frames.], batch size: 18, lr: 4.38e-04 +2022-05-27 22:01:43,932 INFO [train.py:823] (2/4) Epoch 39, 
batch 150, loss[loss=0.1773, simple_loss=0.2679, pruned_loss=0.04332, over 7215.00 frames.], tot_loss[loss=0.1574, simple_loss=0.246, pruned_loss=0.03433, over 755263.42 frames.], batch size: 25, lr: 4.38e-04 +2022-05-27 22:02:23,482 INFO [train.py:823] (2/4) Epoch 39, batch 200, loss[loss=0.1367, simple_loss=0.2211, pruned_loss=0.02617, over 7405.00 frames.], tot_loss[loss=0.1562, simple_loss=0.2453, pruned_loss=0.03356, over 906540.38 frames.], batch size: 19, lr: 4.38e-04 +2022-05-27 22:03:03,310 INFO [train.py:823] (2/4) Epoch 39, batch 250, loss[loss=0.1721, simple_loss=0.2516, pruned_loss=0.04627, over 7294.00 frames.], tot_loss[loss=0.1569, simple_loss=0.2464, pruned_loss=0.03365, over 1020833.53 frames.], batch size: 19, lr: 4.37e-04 +2022-05-27 22:03:42,597 INFO [train.py:823] (2/4) Epoch 39, batch 300, loss[loss=0.1387, simple_loss=0.2239, pruned_loss=0.02672, over 7298.00 frames.], tot_loss[loss=0.1576, simple_loss=0.2467, pruned_loss=0.03422, over 1113439.13 frames.], batch size: 19, lr: 4.37e-04 +2022-05-27 22:04:21,976 INFO [train.py:823] (2/4) Epoch 39, batch 350, loss[loss=0.1416, simple_loss=0.2334, pruned_loss=0.02495, over 7386.00 frames.], tot_loss[loss=0.157, simple_loss=0.2466, pruned_loss=0.03368, over 1184452.49 frames.], batch size: 20, lr: 4.37e-04 +2022-05-27 22:05:01,305 INFO [train.py:823] (2/4) Epoch 39, batch 400, loss[loss=0.1422, simple_loss=0.2193, pruned_loss=0.0325, over 7031.00 frames.], tot_loss[loss=0.1576, simple_loss=0.2475, pruned_loss=0.03385, over 1242141.35 frames.], batch size: 17, lr: 4.36e-04 +2022-05-27 22:05:40,541 INFO [train.py:823] (2/4) Epoch 39, batch 450, loss[loss=0.1692, simple_loss=0.2636, pruned_loss=0.03735, over 6960.00 frames.], tot_loss[loss=0.1575, simple_loss=0.2476, pruned_loss=0.03373, over 1281383.06 frames.], batch size: 26, lr: 4.36e-04 +2022-05-27 22:06:19,116 INFO [train.py:823] (2/4) Epoch 39, batch 500, loss[loss=0.1674, simple_loss=0.2555, pruned_loss=0.03962, over 5302.00 frames.], tot_loss[loss=0.1571, simple_loss=0.247, pruned_loss=0.03363, over 1310883.99 frames.], batch size: 48, lr: 4.36e-04 +2022-05-27 22:06:58,253 INFO [train.py:823] (2/4) Epoch 39, batch 550, loss[loss=0.1889, simple_loss=0.271, pruned_loss=0.0534, over 7228.00 frames.], tot_loss[loss=0.1566, simple_loss=0.2468, pruned_loss=0.03323, over 1332022.89 frames.], batch size: 25, lr: 4.36e-04 +2022-05-27 22:07:37,669 INFO [train.py:823] (2/4) Epoch 39, batch 600, loss[loss=0.1319, simple_loss=0.2181, pruned_loss=0.02279, over 7437.00 frames.], tot_loss[loss=0.1578, simple_loss=0.2476, pruned_loss=0.034, over 1355136.27 frames.], batch size: 18, lr: 4.35e-04 +2022-05-27 22:08:17,283 INFO [train.py:823] (2/4) Epoch 39, batch 650, loss[loss=0.1534, simple_loss=0.2431, pruned_loss=0.03183, over 7395.00 frames.], tot_loss[loss=0.1576, simple_loss=0.2476, pruned_loss=0.03374, over 1373949.39 frames.], batch size: 19, lr: 4.35e-04 +2022-05-27 22:08:55,780 INFO [train.py:823] (2/4) Epoch 39, batch 700, loss[loss=0.1693, simple_loss=0.267, pruned_loss=0.03578, over 7233.00 frames.], tot_loss[loss=0.158, simple_loss=0.2482, pruned_loss=0.03387, over 1383205.82 frames.], batch size: 24, lr: 4.35e-04 +2022-05-27 22:09:34,858 INFO [train.py:823] (2/4) Epoch 39, batch 750, loss[loss=0.155, simple_loss=0.2503, pruned_loss=0.02986, over 7366.00 frames.], tot_loss[loss=0.1567, simple_loss=0.2469, pruned_loss=0.03319, over 1389558.60 frames.], batch size: 20, lr: 4.34e-04 +2022-05-27 22:10:14,110 INFO [train.py:823] (2/4) Epoch 39, batch 800, loss[loss=0.1396, 
simple_loss=0.2281, pruned_loss=0.02556, over 7194.00 frames.], tot_loss[loss=0.1568, simple_loss=0.2471, pruned_loss=0.03322, over 1398442.44 frames.], batch size: 18, lr: 4.34e-04 +2022-05-27 22:10:52,862 INFO [train.py:823] (2/4) Epoch 39, batch 850, loss[loss=0.1892, simple_loss=0.281, pruned_loss=0.04867, over 7340.00 frames.], tot_loss[loss=0.1573, simple_loss=0.2475, pruned_loss=0.03355, over 1397190.38 frames.], batch size: 23, lr: 4.34e-04 +2022-05-27 22:11:31,909 INFO [train.py:823] (2/4) Epoch 39, batch 900, loss[loss=0.1729, simple_loss=0.2716, pruned_loss=0.03714, over 6957.00 frames.], tot_loss[loss=0.1583, simple_loss=0.249, pruned_loss=0.03379, over 1390177.65 frames.], batch size: 29, lr: 4.34e-04 +2022-05-27 22:12:11,464 INFO [train.py:823] (2/4) Epoch 39, batch 950, loss[loss=0.1563, simple_loss=0.2409, pruned_loss=0.03582, over 4559.00 frames.], tot_loss[loss=0.159, simple_loss=0.2489, pruned_loss=0.03454, over 1363967.56 frames.], batch size: 46, lr: 4.33e-04 +2022-05-27 22:12:24,468 INFO [train.py:823] (2/4) Epoch 40, batch 0, loss[loss=0.1642, simple_loss=0.2511, pruned_loss=0.03869, over 7162.00 frames.], tot_loss[loss=0.1642, simple_loss=0.2511, pruned_loss=0.03869, over 7162.00 frames.], batch size: 23, lr: 4.28e-04 +2022-05-27 22:13:03,101 INFO [train.py:823] (2/4) Epoch 40, batch 50, loss[loss=0.1232, simple_loss=0.2177, pruned_loss=0.01435, over 7111.00 frames.], tot_loss[loss=0.1583, simple_loss=0.2482, pruned_loss=0.0342, over 317776.38 frames.], batch size: 20, lr: 4.28e-04 +2022-05-27 22:13:43,986 INFO [train.py:823] (2/4) Epoch 40, batch 100, loss[loss=0.128, simple_loss=0.2073, pruned_loss=0.02434, over 7233.00 frames.], tot_loss[loss=0.1568, simple_loss=0.2463, pruned_loss=0.03362, over 558916.89 frames.], batch size: 16, lr: 4.27e-04 +2022-05-27 22:14:22,937 INFO [train.py:823] (2/4) Epoch 40, batch 150, loss[loss=0.176, simple_loss=0.2665, pruned_loss=0.0427, over 6994.00 frames.], tot_loss[loss=0.1583, simple_loss=0.2477, pruned_loss=0.03447, over 746971.36 frames.], batch size: 29, lr: 4.27e-04 +2022-05-27 22:15:02,209 INFO [train.py:823] (2/4) Epoch 40, batch 200, loss[loss=0.1625, simple_loss=0.2559, pruned_loss=0.03457, over 7180.00 frames.], tot_loss[loss=0.1588, simple_loss=0.2484, pruned_loss=0.03465, over 898142.68 frames.], batch size: 21, lr: 4.27e-04 +2022-05-27 22:15:42,488 INFO [train.py:823] (2/4) Epoch 40, batch 250, loss[loss=0.1424, simple_loss=0.2253, pruned_loss=0.02979, over 7274.00 frames.], tot_loss[loss=0.1583, simple_loss=0.2481, pruned_loss=0.03424, over 1015214.19 frames.], batch size: 16, lr: 4.26e-04 +2022-05-27 22:16:22,013 INFO [train.py:823] (2/4) Epoch 40, batch 300, loss[loss=0.1411, simple_loss=0.2298, pruned_loss=0.02619, over 7375.00 frames.], tot_loss[loss=0.1581, simple_loss=0.2475, pruned_loss=0.03433, over 1105586.30 frames.], batch size: 20, lr: 4.26e-04 +2022-05-27 22:17:01,104 INFO [train.py:823] (2/4) Epoch 40, batch 350, loss[loss=0.164, simple_loss=0.2637, pruned_loss=0.03221, over 6579.00 frames.], tot_loss[loss=0.158, simple_loss=0.2475, pruned_loss=0.03424, over 1178081.12 frames.], batch size: 34, lr: 4.26e-04 +2022-05-27 22:17:42,250 INFO [train.py:823] (2/4) Epoch 40, batch 400, loss[loss=0.1274, simple_loss=0.2147, pruned_loss=0.02009, over 7001.00 frames.], tot_loss[loss=0.1575, simple_loss=0.247, pruned_loss=0.03395, over 1236837.38 frames.], batch size: 16, lr: 4.26e-04 +2022-05-27 22:18:21,047 INFO [train.py:823] (2/4) Epoch 40, batch 450, loss[loss=0.1629, simple_loss=0.2294, 
pruned_loss=0.0482, over 6797.00 frames.], tot_loss[loss=0.1571, simple_loss=0.2465, pruned_loss=0.03383, over 1276629.99 frames.], batch size: 15, lr: 4.25e-04 +2022-05-27 22:19:00,452 INFO [train.py:823] (2/4) Epoch 40, batch 500, loss[loss=0.1547, simple_loss=0.2485, pruned_loss=0.03048, over 7373.00 frames.], tot_loss[loss=0.1573, simple_loss=0.2471, pruned_loss=0.03378, over 1308992.33 frames.], batch size: 20, lr: 4.25e-04 +2022-05-27 22:19:39,660 INFO [train.py:823] (2/4) Epoch 40, batch 550, loss[loss=0.1679, simple_loss=0.2784, pruned_loss=0.02868, over 7293.00 frames.], tot_loss[loss=0.1566, simple_loss=0.2463, pruned_loss=0.03346, over 1336194.86 frames.], batch size: 22, lr: 4.25e-04 +2022-05-27 22:20:18,714 INFO [train.py:823] (2/4) Epoch 40, batch 600, loss[loss=0.1493, simple_loss=0.2441, pruned_loss=0.02732, over 7299.00 frames.], tot_loss[loss=0.1575, simple_loss=0.2473, pruned_loss=0.03385, over 1355579.49 frames.], batch size: 22, lr: 4.24e-04 +2022-05-27 22:20:57,883 INFO [train.py:823] (2/4) Epoch 40, batch 650, loss[loss=0.1479, simple_loss=0.2367, pruned_loss=0.02957, over 7198.00 frames.], tot_loss[loss=0.1577, simple_loss=0.2475, pruned_loss=0.03392, over 1366307.21 frames.], batch size: 19, lr: 4.24e-04 +2022-05-27 22:21:37,032 INFO [train.py:823] (2/4) Epoch 40, batch 700, loss[loss=0.1513, simple_loss=0.2484, pruned_loss=0.02714, over 7204.00 frames.], tot_loss[loss=0.1585, simple_loss=0.2492, pruned_loss=0.03386, over 1379016.92 frames.], batch size: 20, lr: 4.24e-04 +2022-05-27 22:22:15,947 INFO [train.py:823] (2/4) Epoch 40, batch 750, loss[loss=0.1774, simple_loss=0.2645, pruned_loss=0.04514, over 4961.00 frames.], tot_loss[loss=0.1579, simple_loss=0.2486, pruned_loss=0.03356, over 1387943.29 frames.], batch size: 48, lr: 4.24e-04 +2022-05-27 22:22:55,324 INFO [train.py:823] (2/4) Epoch 40, batch 800, loss[loss=0.183, simple_loss=0.2716, pruned_loss=0.0472, over 7187.00 frames.], tot_loss[loss=0.1585, simple_loss=0.2488, pruned_loss=0.03411, over 1388961.48 frames.], batch size: 21, lr: 4.23e-04 +2022-05-27 22:23:34,229 INFO [train.py:823] (2/4) Epoch 40, batch 850, loss[loss=0.157, simple_loss=0.2462, pruned_loss=0.03389, over 7171.00 frames.], tot_loss[loss=0.1588, simple_loss=0.2488, pruned_loss=0.03434, over 1397182.15 frames.], batch size: 22, lr: 4.23e-04 +2022-05-27 22:24:12,887 INFO [train.py:823] (2/4) Epoch 40, batch 900, loss[loss=0.1518, simple_loss=0.2409, pruned_loss=0.03141, over 7364.00 frames.], tot_loss[loss=0.1588, simple_loss=0.2489, pruned_loss=0.03431, over 1389789.68 frames.], batch size: 20, lr: 4.23e-04 +2022-05-27 22:25:03,858 INFO [train.py:823] (2/4) Epoch 41, batch 0, loss[loss=0.1437, simple_loss=0.2329, pruned_loss=0.02725, over 7095.00 frames.], tot_loss[loss=0.1437, simple_loss=0.2329, pruned_loss=0.02725, over 7095.00 frames.], batch size: 19, lr: 4.17e-04 +2022-05-27 22:25:43,069 INFO [train.py:823] (2/4) Epoch 41, batch 50, loss[loss=0.16, simple_loss=0.2601, pruned_loss=0.02996, over 7378.00 frames.], tot_loss[loss=0.1584, simple_loss=0.2486, pruned_loss=0.03408, over 321844.31 frames.], batch size: 20, lr: 4.17e-04 +2022-05-27 22:26:21,978 INFO [train.py:823] (2/4) Epoch 41, batch 100, loss[loss=0.1544, simple_loss=0.2338, pruned_loss=0.03756, over 7096.00 frames.], tot_loss[loss=0.1571, simple_loss=0.2477, pruned_loss=0.03321, over 561684.71 frames.], batch size: 18, lr: 4.17e-04 +2022-05-27 22:27:01,019 INFO [train.py:823] (2/4) Epoch 41, batch 150, loss[loss=0.1656, simple_loss=0.2622, pruned_loss=0.03454, over 
7023.00 frames.], tot_loss[loss=0.1568, simple_loss=0.2476, pruned_loss=0.03304, over 753825.87 frames.], batch size: 26, lr: 4.17e-04 +2022-05-27 22:27:40,128 INFO [train.py:823] (2/4) Epoch 41, batch 200, loss[loss=0.1735, simple_loss=0.263, pruned_loss=0.04199, over 7397.00 frames.], tot_loss[loss=0.1567, simple_loss=0.2476, pruned_loss=0.03294, over 905889.70 frames.], batch size: 19, lr: 4.16e-04 +2022-05-27 22:28:19,504 INFO [train.py:823] (2/4) Epoch 41, batch 250, loss[loss=0.1626, simple_loss=0.2468, pruned_loss=0.03919, over 7105.00 frames.], tot_loss[loss=0.1564, simple_loss=0.2467, pruned_loss=0.03304, over 1017259.74 frames.], batch size: 19, lr: 4.16e-04 +2022-05-27 22:28:58,036 INFO [train.py:823] (2/4) Epoch 41, batch 300, loss[loss=0.1417, simple_loss=0.2303, pruned_loss=0.02652, over 7374.00 frames.], tot_loss[loss=0.1565, simple_loss=0.2464, pruned_loss=0.03325, over 1107953.71 frames.], batch size: 20, lr: 4.16e-04 +2022-05-27 22:29:37,001 INFO [train.py:823] (2/4) Epoch 41, batch 350, loss[loss=0.183, simple_loss=0.2674, pruned_loss=0.04931, over 7171.00 frames.], tot_loss[loss=0.1578, simple_loss=0.2477, pruned_loss=0.03391, over 1175488.16 frames.], batch size: 22, lr: 4.15e-04 +2022-05-27 22:30:15,408 INFO [train.py:823] (2/4) Epoch 41, batch 400, loss[loss=0.1727, simple_loss=0.2654, pruned_loss=0.04001, over 7162.00 frames.], tot_loss[loss=0.1577, simple_loss=0.2477, pruned_loss=0.03383, over 1223236.50 frames.], batch size: 23, lr: 4.15e-04 +2022-05-27 22:30:54,624 INFO [train.py:823] (2/4) Epoch 41, batch 450, loss[loss=0.1252, simple_loss=0.2173, pruned_loss=0.01653, over 7098.00 frames.], tot_loss[loss=0.1579, simple_loss=0.248, pruned_loss=0.03393, over 1264989.55 frames.], batch size: 18, lr: 4.15e-04 +2022-05-27 22:31:33,855 INFO [train.py:823] (2/4) Epoch 41, batch 500, loss[loss=0.1503, simple_loss=0.2457, pruned_loss=0.02744, over 7312.00 frames.], tot_loss[loss=0.1578, simple_loss=0.2486, pruned_loss=0.03344, over 1299456.59 frames.], batch size: 22, lr: 4.15e-04 +2022-05-27 22:32:12,774 INFO [train.py:823] (2/4) Epoch 41, batch 550, loss[loss=0.1733, simple_loss=0.2572, pruned_loss=0.04465, over 7210.00 frames.], tot_loss[loss=0.1577, simple_loss=0.2483, pruned_loss=0.03356, over 1322169.16 frames.], batch size: 19, lr: 4.14e-04 +2022-05-27 22:32:51,823 INFO [train.py:823] (2/4) Epoch 41, batch 600, loss[loss=0.2144, simple_loss=0.2988, pruned_loss=0.06496, over 7169.00 frames.], tot_loss[loss=0.1574, simple_loss=0.2478, pruned_loss=0.03354, over 1339272.29 frames.], batch size: 21, lr: 4.14e-04 +2022-05-27 22:33:31,352 INFO [train.py:823] (2/4) Epoch 41, batch 650, loss[loss=0.1502, simple_loss=0.2423, pruned_loss=0.02903, over 7179.00 frames.], tot_loss[loss=0.1571, simple_loss=0.2474, pruned_loss=0.03334, over 1358189.09 frames.], batch size: 21, lr: 4.14e-04 +2022-05-27 22:34:10,141 INFO [train.py:823] (2/4) Epoch 41, batch 700, loss[loss=0.1435, simple_loss=0.2313, pruned_loss=0.02789, over 7232.00 frames.], tot_loss[loss=0.1567, simple_loss=0.2472, pruned_loss=0.03313, over 1371333.20 frames.], batch size: 16, lr: 4.14e-04 +2022-05-27 22:34:50,575 INFO [train.py:823] (2/4) Epoch 41, batch 750, loss[loss=0.1465, simple_loss=0.233, pruned_loss=0.02995, over 7186.00 frames.], tot_loss[loss=0.1571, simple_loss=0.2474, pruned_loss=0.03342, over 1380074.90 frames.], batch size: 18, lr: 4.13e-04 +2022-05-27 22:35:29,199 INFO [train.py:823] (2/4) Epoch 41, batch 800, loss[loss=0.1413, simple_loss=0.2227, pruned_loss=0.0299, over 7306.00 frames.], 
tot_loss[loss=0.1572, simple_loss=0.2474, pruned_loss=0.03353, over 1382295.94 frames.], batch size: 17, lr: 4.13e-04 +2022-05-27 22:36:09,517 INFO [train.py:823] (2/4) Epoch 41, batch 850, loss[loss=0.1473, simple_loss=0.2327, pruned_loss=0.031, over 7294.00 frames.], tot_loss[loss=0.1576, simple_loss=0.2481, pruned_loss=0.03352, over 1394329.10 frames.], batch size: 19, lr: 4.13e-04 +2022-05-27 22:36:48,686 INFO [train.py:823] (2/4) Epoch 41, batch 900, loss[loss=0.1258, simple_loss=0.2065, pruned_loss=0.02251, over 7286.00 frames.], tot_loss[loss=0.1574, simple_loss=0.2474, pruned_loss=0.03368, over 1399417.71 frames.], batch size: 17, lr: 4.13e-04 +2022-05-27 22:37:42,353 INFO [train.py:823] (2/4) Epoch 42, batch 0, loss[loss=0.1797, simple_loss=0.2805, pruned_loss=0.03944, over 7290.00 frames.], tot_loss[loss=0.1797, simple_loss=0.2805, pruned_loss=0.03944, over 7290.00 frames.], batch size: 21, lr: 4.07e-04 +2022-05-27 22:38:21,750 INFO [train.py:823] (2/4) Epoch 42, batch 50, loss[loss=0.1533, simple_loss=0.2356, pruned_loss=0.03549, over 7379.00 frames.], tot_loss[loss=0.1553, simple_loss=0.2447, pruned_loss=0.03295, over 323064.14 frames.], batch size: 19, lr: 4.07e-04 +2022-05-27 22:39:02,338 INFO [train.py:823] (2/4) Epoch 42, batch 100, loss[loss=0.127, simple_loss=0.2053, pruned_loss=0.02436, over 6806.00 frames.], tot_loss[loss=0.154, simple_loss=0.2446, pruned_loss=0.03165, over 565756.10 frames.], batch size: 15, lr: 4.07e-04 +2022-05-27 22:39:41,292 INFO [train.py:823] (2/4) Epoch 42, batch 150, loss[loss=0.1915, simple_loss=0.2869, pruned_loss=0.04805, over 7165.00 frames.], tot_loss[loss=0.1549, simple_loss=0.2452, pruned_loss=0.03233, over 755606.38 frames.], batch size: 22, lr: 4.07e-04 +2022-05-27 22:40:22,083 INFO [train.py:823] (2/4) Epoch 42, batch 200, loss[loss=0.1591, simple_loss=0.2484, pruned_loss=0.03493, over 7218.00 frames.], tot_loss[loss=0.1564, simple_loss=0.2462, pruned_loss=0.03326, over 900922.77 frames.], batch size: 24, lr: 4.06e-04 +2022-05-27 22:41:01,145 INFO [train.py:823] (2/4) Epoch 42, batch 250, loss[loss=0.1427, simple_loss=0.2211, pruned_loss=0.03219, over 7150.00 frames.], tot_loss[loss=0.1568, simple_loss=0.2466, pruned_loss=0.03345, over 1016898.70 frames.], batch size: 17, lr: 4.06e-04 +2022-05-27 22:41:40,223 INFO [train.py:823] (2/4) Epoch 42, batch 300, loss[loss=0.1791, simple_loss=0.2688, pruned_loss=0.04469, over 7187.00 frames.], tot_loss[loss=0.1574, simple_loss=0.2472, pruned_loss=0.03381, over 1100518.98 frames.], batch size: 21, lr: 4.06e-04 +2022-05-27 22:42:18,672 INFO [train.py:823] (2/4) Epoch 42, batch 350, loss[loss=0.1412, simple_loss=0.2228, pruned_loss=0.02982, over 7138.00 frames.], tot_loss[loss=0.1568, simple_loss=0.2461, pruned_loss=0.03373, over 1166990.24 frames.], batch size: 17, lr: 4.06e-04 +2022-05-27 22:42:57,709 INFO [train.py:823] (2/4) Epoch 42, batch 400, loss[loss=0.1383, simple_loss=0.224, pruned_loss=0.02629, over 7308.00 frames.], tot_loss[loss=0.1559, simple_loss=0.2456, pruned_loss=0.03313, over 1217435.81 frames.], batch size: 17, lr: 4.05e-04 +2022-05-27 22:43:36,651 INFO [train.py:823] (2/4) Epoch 42, batch 450, loss[loss=0.1659, simple_loss=0.2557, pruned_loss=0.03806, over 7235.00 frames.], tot_loss[loss=0.1575, simple_loss=0.2474, pruned_loss=0.03379, over 1267226.11 frames.], batch size: 25, lr: 4.05e-04 +2022-05-27 22:44:16,173 INFO [train.py:823] (2/4) Epoch 42, batch 500, loss[loss=0.1511, simple_loss=0.2263, pruned_loss=0.03796, over 7152.00 frames.], tot_loss[loss=0.1569, 
simple_loss=0.2469, pruned_loss=0.03345, over 1301206.65 frames.], batch size: 17, lr: 4.05e-04 +2022-05-27 22:44:54,543 INFO [train.py:823] (2/4) Epoch 42, batch 550, loss[loss=0.1723, simple_loss=0.2575, pruned_loss=0.04354, over 7203.00 frames.], tot_loss[loss=0.1571, simple_loss=0.2471, pruned_loss=0.03358, over 1322155.82 frames.], batch size: 18, lr: 4.05e-04 +2022-05-27 22:45:33,818 INFO [train.py:823] (2/4) Epoch 42, batch 600, loss[loss=0.1484, simple_loss=0.2405, pruned_loss=0.02811, over 7198.00 frames.], tot_loss[loss=0.158, simple_loss=0.248, pruned_loss=0.03403, over 1343720.04 frames.], batch size: 20, lr: 4.04e-04 +2022-05-27 22:46:12,726 INFO [train.py:823] (2/4) Epoch 42, batch 650, loss[loss=0.1748, simple_loss=0.2594, pruned_loss=0.04508, over 7154.00 frames.], tot_loss[loss=0.158, simple_loss=0.2483, pruned_loss=0.03388, over 1364333.07 frames.], batch size: 23, lr: 4.04e-04 +2022-05-27 22:46:51,921 INFO [train.py:823] (2/4) Epoch 42, batch 700, loss[loss=0.1585, simple_loss=0.2638, pruned_loss=0.02657, over 6795.00 frames.], tot_loss[loss=0.1586, simple_loss=0.2487, pruned_loss=0.03429, over 1370183.64 frames.], batch size: 29, lr: 4.04e-04 +2022-05-27 22:47:31,141 INFO [train.py:823] (2/4) Epoch 42, batch 750, loss[loss=0.1706, simple_loss=0.2662, pruned_loss=0.03752, over 7371.00 frames.], tot_loss[loss=0.1577, simple_loss=0.2481, pruned_loss=0.03366, over 1384097.49 frames.], batch size: 21, lr: 4.04e-04 +2022-05-27 22:48:10,598 INFO [train.py:823] (2/4) Epoch 42, batch 800, loss[loss=0.1571, simple_loss=0.2478, pruned_loss=0.03316, over 6404.00 frames.], tot_loss[loss=0.1577, simple_loss=0.248, pruned_loss=0.0337, over 1392350.50 frames.], batch size: 34, lr: 4.03e-04 +2022-05-27 22:48:49,704 INFO [train.py:823] (2/4) Epoch 42, batch 850, loss[loss=0.137, simple_loss=0.2181, pruned_loss=0.0279, over 7434.00 frames.], tot_loss[loss=0.157, simple_loss=0.2474, pruned_loss=0.03328, over 1398658.20 frames.], batch size: 18, lr: 4.03e-04 +2022-05-27 22:49:29,068 INFO [train.py:823] (2/4) Epoch 42, batch 900, loss[loss=0.18, simple_loss=0.263, pruned_loss=0.04855, over 4759.00 frames.], tot_loss[loss=0.1568, simple_loss=0.2471, pruned_loss=0.03326, over 1397288.89 frames.], batch size: 46, lr: 4.03e-04 +2022-05-27 22:50:20,113 INFO [train.py:823] (2/4) Epoch 43, batch 0, loss[loss=0.1365, simple_loss=0.223, pruned_loss=0.02502, over 7289.00 frames.], tot_loss[loss=0.1365, simple_loss=0.223, pruned_loss=0.02502, over 7289.00 frames.], batch size: 19, lr: 3.98e-04 +2022-05-27 22:50:59,560 INFO [train.py:823] (2/4) Epoch 43, batch 50, loss[loss=0.1748, simple_loss=0.2653, pruned_loss=0.04214, over 7386.00 frames.], tot_loss[loss=0.1549, simple_loss=0.2443, pruned_loss=0.03279, over 321739.62 frames.], batch size: 20, lr: 3.98e-04 +2022-05-27 22:51:38,834 INFO [train.py:823] (2/4) Epoch 43, batch 100, loss[loss=0.153, simple_loss=0.2522, pruned_loss=0.02686, over 7200.00 frames.], tot_loss[loss=0.1542, simple_loss=0.2435, pruned_loss=0.03239, over 565553.90 frames.], batch size: 23, lr: 3.97e-04 +2022-05-27 22:52:22,706 INFO [train.py:823] (2/4) Epoch 43, batch 150, loss[loss=0.1732, simple_loss=0.2716, pruned_loss=0.03739, over 6501.00 frames.], tot_loss[loss=0.1549, simple_loss=0.2441, pruned_loss=0.03286, over 753813.18 frames.], batch size: 34, lr: 3.97e-04 +2022-05-27 22:53:01,516 INFO [train.py:823] (2/4) Epoch 43, batch 200, loss[loss=0.1849, simple_loss=0.2745, pruned_loss=0.04761, over 7349.00 frames.], tot_loss[loss=0.1555, simple_loss=0.2448, pruned_loss=0.03311, 
over 905368.35 frames.], batch size: 23, lr: 3.97e-04 +2022-05-27 22:53:40,829 INFO [train.py:823] (2/4) Epoch 43, batch 250, loss[loss=0.131, simple_loss=0.2167, pruned_loss=0.02265, over 7291.00 frames.], tot_loss[loss=0.1551, simple_loss=0.2444, pruned_loss=0.03295, over 1021519.30 frames.], batch size: 18, lr: 3.97e-04 +2022-05-27 22:54:19,571 INFO [train.py:823] (2/4) Epoch 43, batch 300, loss[loss=0.1525, simple_loss=0.2422, pruned_loss=0.03146, over 7097.00 frames.], tot_loss[loss=0.1558, simple_loss=0.2451, pruned_loss=0.0333, over 1101795.68 frames.], batch size: 18, lr: 3.96e-04 +2022-05-27 22:54:59,048 INFO [train.py:823] (2/4) Epoch 43, batch 350, loss[loss=0.1659, simple_loss=0.2564, pruned_loss=0.03768, over 7345.00 frames.], tot_loss[loss=0.1566, simple_loss=0.2459, pruned_loss=0.03361, over 1174025.75 frames.], batch size: 23, lr: 3.96e-04 +2022-05-27 22:55:37,607 INFO [train.py:823] (2/4) Epoch 43, batch 400, loss[loss=0.1612, simple_loss=0.2577, pruned_loss=0.03231, over 7198.00 frames.], tot_loss[loss=0.1558, simple_loss=0.2453, pruned_loss=0.0332, over 1228710.07 frames.], batch size: 20, lr: 3.96e-04 +2022-05-27 22:56:17,119 INFO [train.py:823] (2/4) Epoch 43, batch 450, loss[loss=0.1607, simple_loss=0.2631, pruned_loss=0.02914, over 7188.00 frames.], tot_loss[loss=0.156, simple_loss=0.2459, pruned_loss=0.03306, over 1275722.46 frames.], batch size: 21, lr: 3.96e-04 +2022-05-27 22:56:56,297 INFO [train.py:823] (2/4) Epoch 43, batch 500, loss[loss=0.1272, simple_loss=0.2006, pruned_loss=0.02691, over 7024.00 frames.], tot_loss[loss=0.1552, simple_loss=0.2453, pruned_loss=0.03252, over 1307822.06 frames.], batch size: 17, lr: 3.95e-04 +2022-05-27 22:57:35,575 INFO [train.py:823] (2/4) Epoch 43, batch 550, loss[loss=0.1863, simple_loss=0.2891, pruned_loss=0.04177, over 7286.00 frames.], tot_loss[loss=0.1564, simple_loss=0.247, pruned_loss=0.03295, over 1337515.09 frames.], batch size: 21, lr: 3.95e-04 +2022-05-27 22:58:14,106 INFO [train.py:823] (2/4) Epoch 43, batch 600, loss[loss=0.1777, simple_loss=0.2687, pruned_loss=0.04333, over 7181.00 frames.], tot_loss[loss=0.1557, simple_loss=0.2463, pruned_loss=0.03252, over 1357618.60 frames.], batch size: 22, lr: 3.95e-04 +2022-05-27 22:58:54,266 INFO [train.py:823] (2/4) Epoch 43, batch 650, loss[loss=0.1675, simple_loss=0.2636, pruned_loss=0.03573, over 7189.00 frames.], tot_loss[loss=0.1562, simple_loss=0.2469, pruned_loss=0.03277, over 1374530.16 frames.], batch size: 20, lr: 3.95e-04 +2022-05-27 22:59:34,016 INFO [train.py:823] (2/4) Epoch 43, batch 700, loss[loss=0.1319, simple_loss=0.2166, pruned_loss=0.0236, over 7025.00 frames.], tot_loss[loss=0.1562, simple_loss=0.2468, pruned_loss=0.03277, over 1383462.69 frames.], batch size: 17, lr: 3.94e-04 +2022-05-27 23:00:13,192 INFO [train.py:823] (2/4) Epoch 43, batch 750, loss[loss=0.1469, simple_loss=0.2429, pruned_loss=0.02545, over 7194.00 frames.], tot_loss[loss=0.156, simple_loss=0.2468, pruned_loss=0.03265, over 1392570.20 frames.], batch size: 21, lr: 3.94e-04 +2022-05-27 23:00:51,814 INFO [train.py:823] (2/4) Epoch 43, batch 800, loss[loss=0.1546, simple_loss=0.2537, pruned_loss=0.02776, over 7310.00 frames.], tot_loss[loss=0.1555, simple_loss=0.246, pruned_loss=0.03253, over 1401375.89 frames.], batch size: 22, lr: 3.94e-04 +2022-05-27 23:01:30,943 INFO [train.py:823] (2/4) Epoch 43, batch 850, loss[loss=0.1561, simple_loss=0.25, pruned_loss=0.03112, over 7171.00 frames.], tot_loss[loss=0.1562, simple_loss=0.2466, pruned_loss=0.03289, over 1403757.73 frames.], 
batch size: 22, lr: 3.94e-04 +2022-05-27 23:02:11,148 INFO [train.py:823] (2/4) Epoch 43, batch 900, loss[loss=0.1484, simple_loss=0.2266, pruned_loss=0.03514, over 6817.00 frames.], tot_loss[loss=0.1555, simple_loss=0.2458, pruned_loss=0.03261, over 1402849.50 frames.], batch size: 15, lr: 3.93e-04 +2022-05-27 23:03:00,554 INFO [train.py:823] (2/4) Epoch 44, batch 0, loss[loss=0.1592, simple_loss=0.2509, pruned_loss=0.03369, over 7302.00 frames.], tot_loss[loss=0.1592, simple_loss=0.2509, pruned_loss=0.03369, over 7302.00 frames.], batch size: 22, lr: 3.89e-04 +2022-05-27 23:03:41,240 INFO [train.py:823] (2/4) Epoch 44, batch 50, loss[loss=0.1349, simple_loss=0.2222, pruned_loss=0.02382, over 7033.00 frames.], tot_loss[loss=0.1524, simple_loss=0.241, pruned_loss=0.03188, over 321769.30 frames.], batch size: 17, lr: 3.89e-04 +2022-05-27 23:04:20,546 INFO [train.py:823] (2/4) Epoch 44, batch 100, loss[loss=0.1542, simple_loss=0.2539, pruned_loss=0.02728, over 7275.00 frames.], tot_loss[loss=0.1532, simple_loss=0.2433, pruned_loss=0.03155, over 567243.14 frames.], batch size: 20, lr: 3.88e-04 +2022-05-27 23:04:59,695 INFO [train.py:823] (2/4) Epoch 44, batch 150, loss[loss=0.1543, simple_loss=0.2564, pruned_loss=0.02613, over 7284.00 frames.], tot_loss[loss=0.1542, simple_loss=0.2443, pruned_loss=0.03198, over 758161.53 frames.], batch size: 20, lr: 3.88e-04 +2022-05-27 23:05:38,964 INFO [train.py:823] (2/4) Epoch 44, batch 200, loss[loss=0.194, simple_loss=0.2769, pruned_loss=0.05562, over 7229.00 frames.], tot_loss[loss=0.1541, simple_loss=0.2444, pruned_loss=0.03191, over 905893.93 frames.], batch size: 24, lr: 3.88e-04 +2022-05-27 23:06:18,036 INFO [train.py:823] (2/4) Epoch 44, batch 250, loss[loss=0.1608, simple_loss=0.2605, pruned_loss=0.03055, over 7138.00 frames.], tot_loss[loss=0.1539, simple_loss=0.2447, pruned_loss=0.03162, over 1021274.96 frames.], batch size: 23, lr: 3.88e-04 +2022-05-27 23:06:56,913 INFO [train.py:823] (2/4) Epoch 44, batch 300, loss[loss=0.1808, simple_loss=0.2697, pruned_loss=0.04593, over 7280.00 frames.], tot_loss[loss=0.1541, simple_loss=0.2448, pruned_loss=0.0317, over 1108516.43 frames.], batch size: 21, lr: 3.87e-04 +2022-05-27 23:07:35,782 INFO [train.py:823] (2/4) Epoch 44, batch 350, loss[loss=0.1409, simple_loss=0.2234, pruned_loss=0.0292, over 7026.00 frames.], tot_loss[loss=0.1541, simple_loss=0.2452, pruned_loss=0.03149, over 1171072.44 frames.], batch size: 16, lr: 3.87e-04 +2022-05-27 23:08:14,808 INFO [train.py:823] (2/4) Epoch 44, batch 400, loss[loss=0.167, simple_loss=0.255, pruned_loss=0.03952, over 4690.00 frames.], tot_loss[loss=0.1541, simple_loss=0.2452, pruned_loss=0.03153, over 1221850.68 frames.], batch size: 46, lr: 3.87e-04 +2022-05-27 23:08:53,876 INFO [train.py:823] (2/4) Epoch 44, batch 450, loss[loss=0.162, simple_loss=0.2523, pruned_loss=0.03589, over 7228.00 frames.], tot_loss[loss=0.1549, simple_loss=0.2461, pruned_loss=0.03183, over 1265205.07 frames.], batch size: 25, lr: 3.87e-04 +2022-05-27 23:09:33,471 INFO [train.py:823] (2/4) Epoch 44, batch 500, loss[loss=0.1798, simple_loss=0.2664, pruned_loss=0.04664, over 7160.00 frames.], tot_loss[loss=0.1545, simple_loss=0.2454, pruned_loss=0.03181, over 1302161.77 frames.], batch size: 17, lr: 3.86e-04 +2022-05-27 23:10:12,713 INFO [train.py:823] (2/4) Epoch 44, batch 550, loss[loss=0.1562, simple_loss=0.2485, pruned_loss=0.032, over 7201.00 frames.], tot_loss[loss=0.154, simple_loss=0.2449, pruned_loss=0.03155, over 1330436.22 frames.], batch size: 24, lr: 3.86e-04 
+2022-05-27 23:10:52,030 INFO [train.py:823] (2/4) Epoch 44, batch 600, loss[loss=0.1503, simple_loss=0.2357, pruned_loss=0.0324, over 7388.00 frames.], tot_loss[loss=0.1546, simple_loss=0.2455, pruned_loss=0.03185, over 1352992.96 frames.], batch size: 19, lr: 3.86e-04 +2022-05-27 23:11:30,667 INFO [train.py:823] (2/4) Epoch 44, batch 650, loss[loss=0.1784, simple_loss=0.2707, pruned_loss=0.04303, over 7411.00 frames.], tot_loss[loss=0.1549, simple_loss=0.2455, pruned_loss=0.03216, over 1367394.37 frames.], batch size: 22, lr: 3.86e-04 +2022-05-27 23:12:09,880 INFO [train.py:823] (2/4) Epoch 44, batch 700, loss[loss=0.1593, simple_loss=0.2522, pruned_loss=0.03324, over 7160.00 frames.], tot_loss[loss=0.155, simple_loss=0.2455, pruned_loss=0.03223, over 1378499.08 frames.], batch size: 23, lr: 3.85e-04 +2022-05-27 23:12:48,591 INFO [train.py:823] (2/4) Epoch 44, batch 750, loss[loss=0.1482, simple_loss=0.2315, pruned_loss=0.03242, over 7167.00 frames.], tot_loss[loss=0.1544, simple_loss=0.2448, pruned_loss=0.03197, over 1390633.46 frames.], batch size: 17, lr: 3.85e-04 +2022-05-27 23:13:27,414 INFO [train.py:823] (2/4) Epoch 44, batch 800, loss[loss=0.1729, simple_loss=0.2699, pruned_loss=0.03796, over 7214.00 frames.], tot_loss[loss=0.1551, simple_loss=0.2457, pruned_loss=0.03226, over 1397356.89 frames.], batch size: 25, lr: 3.85e-04 +2022-05-27 23:14:06,930 INFO [train.py:823] (2/4) Epoch 44, batch 850, loss[loss=0.1506, simple_loss=0.2324, pruned_loss=0.03439, over 6774.00 frames.], tot_loss[loss=0.1553, simple_loss=0.2458, pruned_loss=0.03243, over 1402905.10 frames.], batch size: 15, lr: 3.85e-04 +2022-05-27 23:14:46,178 INFO [train.py:823] (2/4) Epoch 44, batch 900, loss[loss=0.1292, simple_loss=0.2077, pruned_loss=0.02529, over 7292.00 frames.], tot_loss[loss=0.1546, simple_loss=0.2449, pruned_loss=0.03213, over 1400937.24 frames.], batch size: 17, lr: 3.85e-04 +2022-05-27 23:15:24,415 INFO [train.py:823] (2/4) Epoch 44, batch 950, loss[loss=0.1316, simple_loss=0.226, pruned_loss=0.01858, over 5057.00 frames.], tot_loss[loss=0.1549, simple_loss=0.2447, pruned_loss=0.03255, over 1377121.19 frames.], batch size: 47, lr: 3.84e-04 +2022-05-27 23:15:37,750 INFO [train.py:823] (2/4) Epoch 45, batch 0, loss[loss=0.1393, simple_loss=0.2375, pruned_loss=0.02051, over 7270.00 frames.], tot_loss[loss=0.1393, simple_loss=0.2375, pruned_loss=0.02051, over 7270.00 frames.], batch size: 20, lr: 3.80e-04 +2022-05-27 23:16:17,176 INFO [train.py:823] (2/4) Epoch 45, batch 50, loss[loss=0.1506, simple_loss=0.2406, pruned_loss=0.03031, over 7299.00 frames.], tot_loss[loss=0.1556, simple_loss=0.2448, pruned_loss=0.03314, over 324050.05 frames.], batch size: 21, lr: 3.80e-04 +2022-05-27 23:16:56,338 INFO [train.py:823] (2/4) Epoch 45, batch 100, loss[loss=0.1643, simple_loss=0.2668, pruned_loss=0.03091, over 7380.00 frames.], tot_loss[loss=0.1543, simple_loss=0.2444, pruned_loss=0.03208, over 567212.27 frames.], batch size: 21, lr: 3.80e-04 +2022-05-27 23:17:35,602 INFO [train.py:823] (2/4) Epoch 45, batch 150, loss[loss=0.1375, simple_loss=0.2226, pruned_loss=0.02622, over 6785.00 frames.], tot_loss[loss=0.1539, simple_loss=0.244, pruned_loss=0.03188, over 752280.75 frames.], batch size: 15, lr: 3.79e-04 +2022-05-27 23:18:14,595 INFO [train.py:823] (2/4) Epoch 45, batch 200, loss[loss=0.1712, simple_loss=0.2663, pruned_loss=0.03801, over 4615.00 frames.], tot_loss[loss=0.1534, simple_loss=0.2435, pruned_loss=0.03169, over 896947.16 frames.], batch size: 47, lr: 3.79e-04 +2022-05-27 23:18:53,797 INFO 
[train.py:823] (2/4) Epoch 45, batch 250, loss[loss=0.1555, simple_loss=0.2467, pruned_loss=0.03218, over 6457.00 frames.], tot_loss[loss=0.1542, simple_loss=0.2446, pruned_loss=0.03191, over 1010252.02 frames.], batch size: 34, lr: 3.79e-04 +2022-05-27 23:19:32,721 INFO [train.py:823] (2/4) Epoch 45, batch 300, loss[loss=0.171, simple_loss=0.2618, pruned_loss=0.04012, over 7156.00 frames.], tot_loss[loss=0.1538, simple_loss=0.2436, pruned_loss=0.03199, over 1099559.97 frames.], batch size: 23, lr: 3.79e-04 +2022-05-27 23:20:11,929 INFO [train.py:823] (2/4) Epoch 45, batch 350, loss[loss=0.1387, simple_loss=0.2377, pruned_loss=0.01984, over 7426.00 frames.], tot_loss[loss=0.1538, simple_loss=0.2438, pruned_loss=0.03192, over 1171451.38 frames.], batch size: 22, lr: 3.78e-04 +2022-05-27 23:20:50,948 INFO [train.py:823] (2/4) Epoch 45, batch 400, loss[loss=0.1709, simple_loss=0.2534, pruned_loss=0.04422, over 7390.00 frames.], tot_loss[loss=0.154, simple_loss=0.2442, pruned_loss=0.03188, over 1229212.79 frames.], batch size: 20, lr: 3.78e-04 +2022-05-27 23:21:30,384 INFO [train.py:823] (2/4) Epoch 45, batch 450, loss[loss=0.1479, simple_loss=0.2322, pruned_loss=0.03179, over 7192.00 frames.], tot_loss[loss=0.1545, simple_loss=0.2445, pruned_loss=0.03227, over 1270107.13 frames.], batch size: 18, lr: 3.78e-04 +2022-05-27 23:22:12,153 INFO [train.py:823] (2/4) Epoch 45, batch 500, loss[loss=0.1735, simple_loss=0.258, pruned_loss=0.04448, over 7240.00 frames.], tot_loss[loss=0.155, simple_loss=0.245, pruned_loss=0.03248, over 1309183.64 frames.], batch size: 24, lr: 3.78e-04 +2022-05-27 23:22:51,885 INFO [train.py:823] (2/4) Epoch 45, batch 550, loss[loss=0.1423, simple_loss=0.2257, pruned_loss=0.02945, over 7194.00 frames.], tot_loss[loss=0.1545, simple_loss=0.2442, pruned_loss=0.03242, over 1334963.17 frames.], batch size: 18, lr: 3.78e-04 +2022-05-27 23:23:31,256 INFO [train.py:823] (2/4) Epoch 45, batch 600, loss[loss=0.1432, simple_loss=0.2352, pruned_loss=0.02555, over 6395.00 frames.], tot_loss[loss=0.1552, simple_loss=0.2451, pruned_loss=0.03269, over 1348513.21 frames.], batch size: 34, lr: 3.77e-04 +2022-05-27 23:24:10,312 INFO [train.py:823] (2/4) Epoch 45, batch 650, loss[loss=0.1672, simple_loss=0.2644, pruned_loss=0.035, over 7154.00 frames.], tot_loss[loss=0.1547, simple_loss=0.2448, pruned_loss=0.03225, over 1363589.54 frames.], batch size: 23, lr: 3.77e-04 +2022-05-27 23:24:50,463 INFO [train.py:823] (2/4) Epoch 45, batch 700, loss[loss=0.1361, simple_loss=0.2359, pruned_loss=0.01813, over 7312.00 frames.], tot_loss[loss=0.1547, simple_loss=0.245, pruned_loss=0.03225, over 1377082.17 frames.], batch size: 22, lr: 3.77e-04 +2022-05-27 23:25:30,059 INFO [train.py:823] (2/4) Epoch 45, batch 750, loss[loss=0.1515, simple_loss=0.2421, pruned_loss=0.0305, over 6878.00 frames.], tot_loss[loss=0.1545, simple_loss=0.2449, pruned_loss=0.03205, over 1386267.01 frames.], batch size: 29, lr: 3.77e-04 +2022-05-27 23:26:08,864 INFO [train.py:823] (2/4) Epoch 45, batch 800, loss[loss=0.1899, simple_loss=0.2777, pruned_loss=0.05099, over 7326.00 frames.], tot_loss[loss=0.154, simple_loss=0.2447, pruned_loss=0.0316, over 1395220.12 frames.], batch size: 23, lr: 3.77e-04 +2022-05-27 23:26:49,371 INFO [train.py:823] (2/4) Epoch 45, batch 850, loss[loss=0.1562, simple_loss=0.2493, pruned_loss=0.03153, over 7190.00 frames.], tot_loss[loss=0.1544, simple_loss=0.2454, pruned_loss=0.03172, over 1398065.56 frames.], batch size: 21, lr: 3.76e-04 +2022-05-27 23:27:28,252 INFO [train.py:823] (2/4) Epoch 
45, batch 900, loss[loss=0.1539, simple_loss=0.2377, pruned_loss=0.035, over 7007.00 frames.], tot_loss[loss=0.1546, simple_loss=0.2458, pruned_loss=0.0317, over 1400753.71 frames.], batch size: 17, lr: 3.76e-04 +2022-05-27 23:28:22,782 INFO [train.py:823] (2/4) Epoch 46, batch 0, loss[loss=0.1573, simple_loss=0.2492, pruned_loss=0.03273, over 7164.00 frames.], tot_loss[loss=0.1573, simple_loss=0.2492, pruned_loss=0.03273, over 7164.00 frames.], batch size: 22, lr: 3.72e-04 +2022-05-27 23:29:02,167 INFO [train.py:823] (2/4) Epoch 46, batch 50, loss[loss=0.1535, simple_loss=0.2491, pruned_loss=0.02891, over 7287.00 frames.], tot_loss[loss=0.1542, simple_loss=0.2434, pruned_loss=0.03247, over 314797.45 frames.], batch size: 20, lr: 3.72e-04 +2022-05-27 23:29:41,318 INFO [train.py:823] (2/4) Epoch 46, batch 100, loss[loss=0.1341, simple_loss=0.2136, pruned_loss=0.02732, over 7019.00 frames.], tot_loss[loss=0.1523, simple_loss=0.2417, pruned_loss=0.03149, over 561025.48 frames.], batch size: 16, lr: 3.71e-04 +2022-05-27 23:30:20,232 INFO [train.py:823] (2/4) Epoch 46, batch 150, loss[loss=0.1509, simple_loss=0.2432, pruned_loss=0.0293, over 7103.00 frames.], tot_loss[loss=0.1549, simple_loss=0.244, pruned_loss=0.03287, over 753475.24 frames.], batch size: 20, lr: 3.71e-04 +2022-05-27 23:30:59,851 INFO [train.py:823] (2/4) Epoch 46, batch 200, loss[loss=0.1621, simple_loss=0.2476, pruned_loss=0.03828, over 7332.00 frames.], tot_loss[loss=0.1534, simple_loss=0.2427, pruned_loss=0.03203, over 905894.06 frames.], batch size: 23, lr: 3.71e-04 +2022-05-27 23:31:39,147 INFO [train.py:823] (2/4) Epoch 46, batch 250, loss[loss=0.1777, simple_loss=0.2566, pruned_loss=0.04942, over 7124.00 frames.], tot_loss[loss=0.1546, simple_loss=0.2441, pruned_loss=0.03254, over 1019906.95 frames.], batch size: 23, lr: 3.71e-04 +2022-05-27 23:32:18,165 INFO [train.py:823] (2/4) Epoch 46, batch 300, loss[loss=0.1742, simple_loss=0.2713, pruned_loss=0.03849, over 6937.00 frames.], tot_loss[loss=0.1554, simple_loss=0.245, pruned_loss=0.03289, over 1106665.84 frames.], batch size: 29, lr: 3.70e-04 +2022-05-27 23:32:56,786 INFO [train.py:823] (2/4) Epoch 46, batch 350, loss[loss=0.1752, simple_loss=0.2733, pruned_loss=0.0386, over 6529.00 frames.], tot_loss[loss=0.1556, simple_loss=0.2455, pruned_loss=0.03282, over 1178942.55 frames.], batch size: 34, lr: 3.70e-04 +2022-05-27 23:33:36,327 INFO [train.py:823] (2/4) Epoch 46, batch 400, loss[loss=0.1562, simple_loss=0.2499, pruned_loss=0.03123, over 7158.00 frames.], tot_loss[loss=0.1554, simple_loss=0.2459, pruned_loss=0.03249, over 1236076.13 frames.], batch size: 23, lr: 3.70e-04 +2022-05-27 23:34:15,558 INFO [train.py:823] (2/4) Epoch 46, batch 450, loss[loss=0.1652, simple_loss=0.2546, pruned_loss=0.03789, over 7275.00 frames.], tot_loss[loss=0.1548, simple_loss=0.245, pruned_loss=0.0323, over 1278244.46 frames.], batch size: 20, lr: 3.70e-04 +2022-05-27 23:34:54,770 INFO [train.py:823] (2/4) Epoch 46, batch 500, loss[loss=0.1378, simple_loss=0.2227, pruned_loss=0.02644, over 6777.00 frames.], tot_loss[loss=0.1548, simple_loss=0.2457, pruned_loss=0.032, over 1303720.00 frames.], batch size: 15, lr: 3.70e-04 +2022-05-27 23:35:34,045 INFO [train.py:823] (2/4) Epoch 46, batch 550, loss[loss=0.1604, simple_loss=0.2522, pruned_loss=0.03424, over 7311.00 frames.], tot_loss[loss=0.1544, simple_loss=0.2453, pruned_loss=0.0318, over 1333388.18 frames.], batch size: 22, lr: 3.69e-04 +2022-05-27 23:36:13,177 INFO [train.py:823] (2/4) Epoch 46, batch 600, loss[loss=0.1377, 
simple_loss=0.2273, pruned_loss=0.02408, over 7017.00 frames.], tot_loss[loss=0.1536, simple_loss=0.2443, pruned_loss=0.03142, over 1351795.70 frames.], batch size: 17, lr: 3.69e-04 +2022-05-27 23:36:52,195 INFO [train.py:823] (2/4) Epoch 46, batch 650, loss[loss=0.1668, simple_loss=0.2631, pruned_loss=0.0353, over 7150.00 frames.], tot_loss[loss=0.1532, simple_loss=0.2439, pruned_loss=0.03127, over 1365984.91 frames.], batch size: 23, lr: 3.69e-04 +2022-05-27 23:37:31,619 INFO [train.py:823] (2/4) Epoch 46, batch 700, loss[loss=0.1289, simple_loss=0.2187, pruned_loss=0.01951, over 7148.00 frames.], tot_loss[loss=0.1535, simple_loss=0.2435, pruned_loss=0.03171, over 1374440.10 frames.], batch size: 17, lr: 3.69e-04 +2022-05-27 23:38:10,486 INFO [train.py:823] (2/4) Epoch 46, batch 750, loss[loss=0.1529, simple_loss=0.2465, pruned_loss=0.02959, over 6462.00 frames.], tot_loss[loss=0.1528, simple_loss=0.2427, pruned_loss=0.03145, over 1382938.78 frames.], batch size: 34, lr: 3.69e-04 +2022-05-27 23:38:49,177 INFO [train.py:823] (2/4) Epoch 46, batch 800, loss[loss=0.1612, simple_loss=0.2583, pruned_loss=0.032, over 7194.00 frames.], tot_loss[loss=0.1536, simple_loss=0.2435, pruned_loss=0.03184, over 1386682.15 frames.], batch size: 20, lr: 3.68e-04 +2022-05-27 23:39:28,298 INFO [train.py:823] (2/4) Epoch 46, batch 850, loss[loss=0.1687, simple_loss=0.2549, pruned_loss=0.0413, over 7343.00 frames.], tot_loss[loss=0.1531, simple_loss=0.2426, pruned_loss=0.03175, over 1389700.73 frames.], batch size: 23, lr: 3.68e-04 +2022-05-27 23:40:07,668 INFO [train.py:823] (2/4) Epoch 46, batch 900, loss[loss=0.1405, simple_loss=0.2225, pruned_loss=0.02923, over 7094.00 frames.], tot_loss[loss=0.1535, simple_loss=0.243, pruned_loss=0.03199, over 1397191.76 frames.], batch size: 18, lr: 3.68e-04 +2022-05-27 23:41:01,393 INFO [train.py:823] (2/4) Epoch 47, batch 0, loss[loss=0.144, simple_loss=0.2297, pruned_loss=0.02917, over 7002.00 frames.], tot_loss[loss=0.144, simple_loss=0.2297, pruned_loss=0.02917, over 7002.00 frames.], batch size: 16, lr: 3.64e-04 +2022-05-27 23:41:40,290 INFO [train.py:823] (2/4) Epoch 47, batch 50, loss[loss=0.1478, simple_loss=0.2172, pruned_loss=0.03924, over 7273.00 frames.], tot_loss[loss=0.152, simple_loss=0.2422, pruned_loss=0.03095, over 321774.82 frames.], batch size: 17, lr: 3.64e-04 +2022-05-27 23:42:19,484 INFO [train.py:823] (2/4) Epoch 47, batch 100, loss[loss=0.1302, simple_loss=0.2167, pruned_loss=0.02183, over 7300.00 frames.], tot_loss[loss=0.1514, simple_loss=0.2412, pruned_loss=0.0308, over 565682.98 frames.], batch size: 18, lr: 3.63e-04 +2022-05-27 23:42:59,001 INFO [train.py:823] (2/4) Epoch 47, batch 150, loss[loss=0.1982, simple_loss=0.2966, pruned_loss=0.04996, over 7288.00 frames.], tot_loss[loss=0.1525, simple_loss=0.2423, pruned_loss=0.03136, over 757267.20 frames.], batch size: 22, lr: 3.63e-04 +2022-05-27 23:43:37,746 INFO [train.py:823] (2/4) Epoch 47, batch 200, loss[loss=0.1812, simple_loss=0.2526, pruned_loss=0.05491, over 7089.00 frames.], tot_loss[loss=0.1529, simple_loss=0.2427, pruned_loss=0.03159, over 901814.16 frames.], batch size: 18, lr: 3.63e-04 +2022-05-27 23:44:17,121 INFO [train.py:823] (2/4) Epoch 47, batch 250, loss[loss=0.155, simple_loss=0.2419, pruned_loss=0.03409, over 7400.00 frames.], tot_loss[loss=0.1533, simple_loss=0.2441, pruned_loss=0.03125, over 1022714.83 frames.], batch size: 19, lr: 3.63e-04 +2022-05-27 23:44:56,264 INFO [train.py:823] (2/4) Epoch 47, batch 300, loss[loss=0.1313, simple_loss=0.2162, 
pruned_loss=0.02321, over 7190.00 frames.], tot_loss[loss=0.1533, simple_loss=0.2443, pruned_loss=0.0311, over 1111770.93 frames.], batch size: 18, lr: 3.63e-04 +2022-05-27 23:45:37,076 INFO [train.py:823] (2/4) Epoch 47, batch 350, loss[loss=0.1492, simple_loss=0.2396, pruned_loss=0.02935, over 7287.00 frames.], tot_loss[loss=0.153, simple_loss=0.2439, pruned_loss=0.03106, over 1179236.77 frames.], batch size: 20, lr: 3.62e-04 +2022-05-27 23:46:17,197 INFO [train.py:823] (2/4) Epoch 47, batch 400, loss[loss=0.155, simple_loss=0.2574, pruned_loss=0.02633, over 7282.00 frames.], tot_loss[loss=0.154, simple_loss=0.2447, pruned_loss=0.03166, over 1233158.69 frames.], batch size: 20, lr: 3.62e-04 +2022-05-27 23:46:56,142 INFO [train.py:823] (2/4) Epoch 47, batch 450, loss[loss=0.145, simple_loss=0.2291, pruned_loss=0.03043, over 7152.00 frames.], tot_loss[loss=0.1542, simple_loss=0.2449, pruned_loss=0.03173, over 1273940.45 frames.], batch size: 17, lr: 3.62e-04 +2022-05-27 23:47:36,430 INFO [train.py:823] (2/4) Epoch 47, batch 500, loss[loss=0.1462, simple_loss=0.232, pruned_loss=0.03022, over 7103.00 frames.], tot_loss[loss=0.1545, simple_loss=0.2449, pruned_loss=0.03202, over 1302146.15 frames.], batch size: 19, lr: 3.62e-04 +2022-05-27 23:48:15,863 INFO [train.py:823] (2/4) Epoch 47, batch 550, loss[loss=0.1388, simple_loss=0.2293, pruned_loss=0.02413, over 7384.00 frames.], tot_loss[loss=0.1539, simple_loss=0.2441, pruned_loss=0.03189, over 1326606.42 frames.], batch size: 19, lr: 3.62e-04 +2022-05-27 23:48:54,641 INFO [train.py:823] (2/4) Epoch 47, batch 600, loss[loss=0.1632, simple_loss=0.2654, pruned_loss=0.03053, over 7022.00 frames.], tot_loss[loss=0.1539, simple_loss=0.2444, pruned_loss=0.0317, over 1345630.12 frames.], batch size: 26, lr: 3.61e-04 +2022-05-27 23:49:34,989 INFO [train.py:823] (2/4) Epoch 47, batch 650, loss[loss=0.134, simple_loss=0.2173, pruned_loss=0.02537, over 7306.00 frames.], tot_loss[loss=0.154, simple_loss=0.2449, pruned_loss=0.03154, over 1363439.02 frames.], batch size: 17, lr: 3.61e-04 +2022-05-27 23:50:14,041 INFO [train.py:823] (2/4) Epoch 47, batch 700, loss[loss=0.1845, simple_loss=0.2784, pruned_loss=0.0453, over 7327.00 frames.], tot_loss[loss=0.1543, simple_loss=0.245, pruned_loss=0.03177, over 1370700.87 frames.], batch size: 23, lr: 3.61e-04 +2022-05-27 23:50:53,706 INFO [train.py:823] (2/4) Epoch 47, batch 750, loss[loss=0.1407, simple_loss=0.2325, pruned_loss=0.02449, over 7289.00 frames.], tot_loss[loss=0.1545, simple_loss=0.2448, pruned_loss=0.03203, over 1382659.74 frames.], batch size: 19, lr: 3.61e-04 +2022-05-27 23:51:32,290 INFO [train.py:823] (2/4) Epoch 47, batch 800, loss[loss=0.1733, simple_loss=0.2685, pruned_loss=0.03903, over 7026.00 frames.], tot_loss[loss=0.1537, simple_loss=0.244, pruned_loss=0.03165, over 1390392.22 frames.], batch size: 26, lr: 3.61e-04 +2022-05-27 23:52:11,488 INFO [train.py:823] (2/4) Epoch 47, batch 850, loss[loss=0.1263, simple_loss=0.2206, pruned_loss=0.016, over 7187.00 frames.], tot_loss[loss=0.1539, simple_loss=0.2442, pruned_loss=0.03177, over 1391754.70 frames.], batch size: 18, lr: 3.60e-04 +2022-05-27 23:52:50,576 INFO [train.py:823] (2/4) Epoch 47, batch 900, loss[loss=0.1814, simple_loss=0.2721, pruned_loss=0.04537, over 7306.00 frames.], tot_loss[loss=0.154, simple_loss=0.2446, pruned_loss=0.03171, over 1396969.89 frames.], batch size: 22, lr: 3.60e-04 +2022-05-27 23:53:43,420 INFO [train.py:823] (2/4) Epoch 48, batch 0, loss[loss=0.1615, simple_loss=0.2475, pruned_loss=0.03772, over 7184.00 
frames.], tot_loss[loss=0.1615, simple_loss=0.2475, pruned_loss=0.03772, over 7184.00 frames.], batch size: 21, lr: 3.56e-04 +2022-05-27 23:54:22,661 INFO [train.py:823] (2/4) Epoch 48, batch 50, loss[loss=0.1419, simple_loss=0.2305, pruned_loss=0.0267, over 7139.00 frames.], tot_loss[loss=0.1552, simple_loss=0.2432, pruned_loss=0.03357, over 320058.32 frames.], batch size: 17, lr: 3.56e-04 +2022-05-27 23:55:01,967 INFO [train.py:823] (2/4) Epoch 48, batch 100, loss[loss=0.1576, simple_loss=0.2485, pruned_loss=0.03334, over 7195.00 frames.], tot_loss[loss=0.1546, simple_loss=0.2433, pruned_loss=0.03295, over 564668.80 frames.], batch size: 25, lr: 3.56e-04 +2022-05-27 23:55:41,060 INFO [train.py:823] (2/4) Epoch 48, batch 150, loss[loss=0.1206, simple_loss=0.2087, pruned_loss=0.01623, over 7291.00 frames.], tot_loss[loss=0.1539, simple_loss=0.2439, pruned_loss=0.03197, over 759044.07 frames.], batch size: 17, lr: 3.56e-04 +2022-05-27 23:56:20,139 INFO [train.py:823] (2/4) Epoch 48, batch 200, loss[loss=0.183, simple_loss=0.2872, pruned_loss=0.03939, over 7303.00 frames.], tot_loss[loss=0.1527, simple_loss=0.2429, pruned_loss=0.03121, over 906866.83 frames.], batch size: 22, lr: 3.55e-04 +2022-05-27 23:56:59,324 INFO [train.py:823] (2/4) Epoch 48, batch 250, loss[loss=0.152, simple_loss=0.2393, pruned_loss=0.03241, over 7200.00 frames.], tot_loss[loss=0.152, simple_loss=0.2417, pruned_loss=0.03112, over 1022784.67 frames.], batch size: 19, lr: 3.55e-04 +2022-05-27 23:57:38,936 INFO [train.py:823] (2/4) Epoch 48, batch 300, loss[loss=0.1838, simple_loss=0.2702, pruned_loss=0.04868, over 7089.00 frames.], tot_loss[loss=0.1514, simple_loss=0.2406, pruned_loss=0.03109, over 1114965.97 frames.], batch size: 26, lr: 3.55e-04 +2022-05-27 23:58:17,911 INFO [train.py:823] (2/4) Epoch 48, batch 350, loss[loss=0.1723, simple_loss=0.2679, pruned_loss=0.03835, over 4955.00 frames.], tot_loss[loss=0.1518, simple_loss=0.241, pruned_loss=0.03134, over 1181386.27 frames.], batch size: 48, lr: 3.55e-04 +2022-05-27 23:58:57,358 INFO [train.py:823] (2/4) Epoch 48, batch 400, loss[loss=0.1391, simple_loss=0.2339, pruned_loss=0.02214, over 6387.00 frames.], tot_loss[loss=0.1522, simple_loss=0.2415, pruned_loss=0.03141, over 1235964.37 frames.], batch size: 34, lr: 3.55e-04 +2022-05-27 23:59:36,506 INFO [train.py:823] (2/4) Epoch 48, batch 450, loss[loss=0.1503, simple_loss=0.2285, pruned_loss=0.03601, over 7285.00 frames.], tot_loss[loss=0.1528, simple_loss=0.2423, pruned_loss=0.03163, over 1278765.13 frames.], batch size: 17, lr: 3.54e-04 +2022-05-28 00:00:15,816 INFO [train.py:823] (2/4) Epoch 48, batch 500, loss[loss=0.1535, simple_loss=0.2422, pruned_loss=0.03243, over 7200.00 frames.], tot_loss[loss=0.1534, simple_loss=0.2429, pruned_loss=0.03189, over 1308961.38 frames.], batch size: 20, lr: 3.54e-04 +2022-05-28 00:00:54,501 INFO [train.py:823] (2/4) Epoch 48, batch 550, loss[loss=0.1553, simple_loss=0.2498, pruned_loss=0.03038, over 7417.00 frames.], tot_loss[loss=0.1534, simple_loss=0.243, pruned_loss=0.03188, over 1329961.92 frames.], batch size: 22, lr: 3.54e-04 +2022-05-28 00:01:33,228 INFO [train.py:823] (2/4) Epoch 48, batch 600, loss[loss=0.1577, simple_loss=0.2429, pruned_loss=0.03621, over 7278.00 frames.], tot_loss[loss=0.154, simple_loss=0.2438, pruned_loss=0.03214, over 1349044.76 frames.], batch size: 20, lr: 3.54e-04 +2022-05-28 00:02:11,684 INFO [train.py:823] (2/4) Epoch 48, batch 650, loss[loss=0.1563, simple_loss=0.2599, pruned_loss=0.02636, over 7383.00 frames.], tot_loss[loss=0.1538, 
simple_loss=0.2437, pruned_loss=0.03192, over 1363312.62 frames.], batch size: 21, lr: 3.54e-04 +2022-05-28 00:02:51,193 INFO [train.py:823] (2/4) Epoch 48, batch 700, loss[loss=0.1741, simple_loss=0.2675, pruned_loss=0.04037, over 7161.00 frames.], tot_loss[loss=0.1538, simple_loss=0.2445, pruned_loss=0.03158, over 1371159.72 frames.], batch size: 22, lr: 3.53e-04 +2022-05-28 00:03:30,286 INFO [train.py:823] (2/4) Epoch 48, batch 750, loss[loss=0.1507, simple_loss=0.2378, pruned_loss=0.03179, over 7103.00 frames.], tot_loss[loss=0.1537, simple_loss=0.2442, pruned_loss=0.03165, over 1383359.91 frames.], batch size: 19, lr: 3.53e-04 +2022-05-28 00:04:09,545 INFO [train.py:823] (2/4) Epoch 48, batch 800, loss[loss=0.1486, simple_loss=0.2389, pruned_loss=0.02917, over 7335.00 frames.], tot_loss[loss=0.1537, simple_loss=0.2441, pruned_loss=0.03162, over 1390851.40 frames.], batch size: 23, lr: 3.53e-04 +2022-05-28 00:04:48,597 INFO [train.py:823] (2/4) Epoch 48, batch 850, loss[loss=0.1464, simple_loss=0.2359, pruned_loss=0.02841, over 7271.00 frames.], tot_loss[loss=0.1533, simple_loss=0.2432, pruned_loss=0.03168, over 1391103.74 frames.], batch size: 17, lr: 3.53e-04 +2022-05-28 00:05:27,346 INFO [train.py:823] (2/4) Epoch 48, batch 900, loss[loss=0.1486, simple_loss=0.2371, pruned_loss=0.03003, over 7291.00 frames.], tot_loss[loss=0.1534, simple_loss=0.2435, pruned_loss=0.03163, over 1394671.34 frames.], batch size: 19, lr: 3.53e-04 +2022-05-28 00:06:18,160 INFO [train.py:823] (2/4) Epoch 49, batch 0, loss[loss=0.1602, simple_loss=0.25, pruned_loss=0.03518, over 7383.00 frames.], tot_loss[loss=0.1602, simple_loss=0.25, pruned_loss=0.03518, over 7383.00 frames.], batch size: 20, lr: 3.49e-04 +2022-05-28 00:06:57,231 INFO [train.py:823] (2/4) Epoch 49, batch 50, loss[loss=0.153, simple_loss=0.2555, pruned_loss=0.02527, over 7287.00 frames.], tot_loss[loss=0.1521, simple_loss=0.2432, pruned_loss=0.03048, over 319059.04 frames.], batch size: 21, lr: 3.49e-04 +2022-05-28 00:07:37,600 INFO [train.py:823] (2/4) Epoch 49, batch 100, loss[loss=0.1329, simple_loss=0.2181, pruned_loss=0.02386, over 7186.00 frames.], tot_loss[loss=0.152, simple_loss=0.2428, pruned_loss=0.03062, over 560953.40 frames.], batch size: 18, lr: 3.48e-04 +2022-05-28 00:08:16,695 INFO [train.py:823] (2/4) Epoch 49, batch 150, loss[loss=0.1488, simple_loss=0.2322, pruned_loss=0.03275, over 5431.00 frames.], tot_loss[loss=0.1522, simple_loss=0.2426, pruned_loss=0.03093, over 751657.48 frames.], batch size: 47, lr: 3.48e-04 +2022-05-28 00:08:56,118 INFO [train.py:823] (2/4) Epoch 49, batch 200, loss[loss=0.1568, simple_loss=0.2447, pruned_loss=0.0345, over 7158.00 frames.], tot_loss[loss=0.1515, simple_loss=0.2419, pruned_loss=0.03059, over 901870.09 frames.], batch size: 23, lr: 3.48e-04 +2022-05-28 00:09:37,933 INFO [train.py:823] (2/4) Epoch 49, batch 250, loss[loss=0.1582, simple_loss=0.2455, pruned_loss=0.03547, over 7191.00 frames.], tot_loss[loss=0.1526, simple_loss=0.2434, pruned_loss=0.0309, over 1021921.78 frames.], batch size: 20, lr: 3.48e-04 +2022-05-28 00:10:17,058 INFO [train.py:823] (2/4) Epoch 49, batch 300, loss[loss=0.1342, simple_loss=0.2134, pruned_loss=0.02753, over 7305.00 frames.], tot_loss[loss=0.1515, simple_loss=0.2418, pruned_loss=0.03059, over 1113658.74 frames.], batch size: 18, lr: 3.48e-04 +2022-05-28 00:10:56,099 INFO [train.py:823] (2/4) Epoch 49, batch 350, loss[loss=0.1604, simple_loss=0.2555, pruned_loss=0.03261, over 7230.00 frames.], tot_loss[loss=0.1511, simple_loss=0.2415, 
pruned_loss=0.03033, over 1177265.59 frames.], batch size: 25, lr: 3.48e-04 +2022-05-28 00:11:35,226 INFO [train.py:823] (2/4) Epoch 49, batch 400, loss[loss=0.1256, simple_loss=0.2088, pruned_loss=0.02123, over 7011.00 frames.], tot_loss[loss=0.1521, simple_loss=0.2423, pruned_loss=0.03092, over 1228308.05 frames.], batch size: 16, lr: 3.47e-04 +2022-05-28 00:12:14,653 INFO [train.py:823] (2/4) Epoch 49, batch 450, loss[loss=0.162, simple_loss=0.2593, pruned_loss=0.03233, over 7204.00 frames.], tot_loss[loss=0.1521, simple_loss=0.2424, pruned_loss=0.03086, over 1273327.96 frames.], batch size: 24, lr: 3.47e-04 +2022-05-28 00:12:54,468 INFO [train.py:823] (2/4) Epoch 49, batch 500, loss[loss=0.1654, simple_loss=0.2649, pruned_loss=0.03298, over 6445.00 frames.], tot_loss[loss=0.1514, simple_loss=0.2417, pruned_loss=0.0305, over 1304803.67 frames.], batch size: 34, lr: 3.47e-04 +2022-05-28 00:13:33,860 INFO [train.py:823] (2/4) Epoch 49, batch 550, loss[loss=0.1398, simple_loss=0.2204, pruned_loss=0.0296, over 7288.00 frames.], tot_loss[loss=0.1514, simple_loss=0.2412, pruned_loss=0.03083, over 1332037.55 frames.], batch size: 17, lr: 3.47e-04 +2022-05-28 00:14:12,735 INFO [train.py:823] (2/4) Epoch 49, batch 600, loss[loss=0.1566, simple_loss=0.2487, pruned_loss=0.03222, over 7217.00 frames.], tot_loss[loss=0.152, simple_loss=0.2418, pruned_loss=0.03108, over 1351463.65 frames.], batch size: 24, lr: 3.47e-04 +2022-05-28 00:14:52,428 INFO [train.py:823] (2/4) Epoch 49, batch 650, loss[loss=0.1457, simple_loss=0.2255, pruned_loss=0.03297, over 7163.00 frames.], tot_loss[loss=0.1516, simple_loss=0.2418, pruned_loss=0.03073, over 1366918.70 frames.], batch size: 17, lr: 3.46e-04 +2022-05-28 00:15:31,430 INFO [train.py:823] (2/4) Epoch 49, batch 700, loss[loss=0.1529, simple_loss=0.2443, pruned_loss=0.03072, over 7418.00 frames.], tot_loss[loss=0.1528, simple_loss=0.2428, pruned_loss=0.03137, over 1370258.42 frames.], batch size: 22, lr: 3.46e-04 +2022-05-28 00:16:11,054 INFO [train.py:823] (2/4) Epoch 49, batch 750, loss[loss=0.14, simple_loss=0.2311, pruned_loss=0.02445, over 7300.00 frames.], tot_loss[loss=0.1529, simple_loss=0.2433, pruned_loss=0.03122, over 1380828.09 frames.], batch size: 19, lr: 3.46e-04 +2022-05-28 00:16:49,992 INFO [train.py:823] (2/4) Epoch 49, batch 800, loss[loss=0.1362, simple_loss=0.2151, pruned_loss=0.02864, over 7162.00 frames.], tot_loss[loss=0.1527, simple_loss=0.2433, pruned_loss=0.03103, over 1384006.42 frames.], batch size: 17, lr: 3.46e-04 +2022-05-28 00:17:29,761 INFO [train.py:823] (2/4) Epoch 49, batch 850, loss[loss=0.1407, simple_loss=0.233, pruned_loss=0.02421, over 7087.00 frames.], tot_loss[loss=0.1531, simple_loss=0.2435, pruned_loss=0.03132, over 1391736.31 frames.], batch size: 18, lr: 3.46e-04 +2022-05-28 00:18:08,793 INFO [train.py:823] (2/4) Epoch 49, batch 900, loss[loss=0.1454, simple_loss=0.2372, pruned_loss=0.02683, over 6468.00 frames.], tot_loss[loss=0.1531, simple_loss=0.2437, pruned_loss=0.03118, over 1394832.55 frames.], batch size: 34, lr: 3.45e-04 +2022-05-28 00:19:00,838 INFO [train.py:823] (2/4) Epoch 50, batch 0, loss[loss=0.168, simple_loss=0.2557, pruned_loss=0.04013, over 6957.00 frames.], tot_loss[loss=0.168, simple_loss=0.2557, pruned_loss=0.04013, over 6957.00 frames.], batch size: 29, lr: 3.42e-04 +2022-05-28 00:19:39,998 INFO [train.py:823] (2/4) Epoch 50, batch 50, loss[loss=0.1379, simple_loss=0.2317, pruned_loss=0.02206, over 7288.00 frames.], tot_loss[loss=0.1496, simple_loss=0.2379, pruned_loss=0.03064, over 
320766.65 frames.], batch size: 20, lr: 3.42e-04 +2022-05-28 00:20:19,160 INFO [train.py:823] (2/4) Epoch 50, batch 100, loss[loss=0.1737, simple_loss=0.2662, pruned_loss=0.04061, over 7186.00 frames.], tot_loss[loss=0.1498, simple_loss=0.2395, pruned_loss=0.03007, over 561358.11 frames.], batch size: 23, lr: 3.41e-04 +2022-05-28 00:20:58,319 INFO [train.py:823] (2/4) Epoch 50, batch 150, loss[loss=0.1625, simple_loss=0.2643, pruned_loss=0.03029, over 7371.00 frames.], tot_loss[loss=0.1528, simple_loss=0.2429, pruned_loss=0.0314, over 751560.77 frames.], batch size: 21, lr: 3.41e-04 +2022-05-28 00:21:37,463 INFO [train.py:823] (2/4) Epoch 50, batch 200, loss[loss=0.136, simple_loss=0.2154, pruned_loss=0.02826, over 7089.00 frames.], tot_loss[loss=0.1521, simple_loss=0.2427, pruned_loss=0.03074, over 900701.76 frames.], batch size: 18, lr: 3.41e-04 +2022-05-28 00:22:16,583 INFO [train.py:823] (2/4) Epoch 50, batch 250, loss[loss=0.1896, simple_loss=0.2902, pruned_loss=0.04449, over 7176.00 frames.], tot_loss[loss=0.1527, simple_loss=0.2432, pruned_loss=0.03115, over 1018120.49 frames.], batch size: 22, lr: 3.41e-04 +2022-05-28 00:22:55,547 INFO [train.py:823] (2/4) Epoch 50, batch 300, loss[loss=0.1495, simple_loss=0.2395, pruned_loss=0.02978, over 7193.00 frames.], tot_loss[loss=0.153, simple_loss=0.244, pruned_loss=0.03104, over 1108905.32 frames.], batch size: 20, lr: 3.41e-04 +2022-05-28 00:23:34,739 INFO [train.py:823] (2/4) Epoch 50, batch 350, loss[loss=0.1629, simple_loss=0.2439, pruned_loss=0.04093, over 7408.00 frames.], tot_loss[loss=0.1529, simple_loss=0.2436, pruned_loss=0.03106, over 1177705.66 frames.], batch size: 22, lr: 3.41e-04 +2022-05-28 00:24:13,682 INFO [train.py:823] (2/4) Epoch 50, batch 400, loss[loss=0.1525, simple_loss=0.2486, pruned_loss=0.0282, over 7032.00 frames.], tot_loss[loss=0.1513, simple_loss=0.2417, pruned_loss=0.03048, over 1232248.22 frames.], batch size: 26, lr: 3.40e-04 +2022-05-28 00:24:52,172 INFO [train.py:823] (2/4) Epoch 50, batch 450, loss[loss=0.1636, simple_loss=0.2589, pruned_loss=0.03415, over 6610.00 frames.], tot_loss[loss=0.1514, simple_loss=0.2421, pruned_loss=0.03041, over 1273636.09 frames.], batch size: 34, lr: 3.40e-04 +2022-05-28 00:25:31,605 INFO [train.py:823] (2/4) Epoch 50, batch 500, loss[loss=0.1496, simple_loss=0.2301, pruned_loss=0.0346, over 7287.00 frames.], tot_loss[loss=0.1513, simple_loss=0.242, pruned_loss=0.03034, over 1306293.37 frames.], batch size: 19, lr: 3.40e-04 +2022-05-28 00:26:10,810 INFO [train.py:823] (2/4) Epoch 50, batch 550, loss[loss=0.1742, simple_loss=0.2707, pruned_loss=0.03889, over 7229.00 frames.], tot_loss[loss=0.1512, simple_loss=0.2417, pruned_loss=0.03039, over 1334059.99 frames.], batch size: 24, lr: 3.40e-04 +2022-05-28 00:26:49,689 INFO [train.py:823] (2/4) Epoch 50, batch 600, loss[loss=0.1324, simple_loss=0.2154, pruned_loss=0.02463, over 7019.00 frames.], tot_loss[loss=0.1517, simple_loss=0.242, pruned_loss=0.03068, over 1353092.62 frames.], batch size: 16, lr: 3.40e-04 +2022-05-28 00:27:28,441 INFO [train.py:823] (2/4) Epoch 50, batch 650, loss[loss=0.1179, simple_loss=0.1977, pruned_loss=0.01903, over 7003.00 frames.], tot_loss[loss=0.1519, simple_loss=0.2426, pruned_loss=0.03061, over 1363982.84 frames.], batch size: 16, lr: 3.39e-04 +2022-05-28 00:28:07,672 INFO [train.py:823] (2/4) Epoch 50, batch 700, loss[loss=0.1327, simple_loss=0.2133, pruned_loss=0.02609, over 7007.00 frames.], tot_loss[loss=0.1513, simple_loss=0.2419, pruned_loss=0.0304, over 1376263.93 frames.], batch 
size: 16, lr: 3.39e-04 +2022-05-28 00:28:46,799 INFO [train.py:823] (2/4) Epoch 50, batch 750, loss[loss=0.1556, simple_loss=0.2574, pruned_loss=0.02689, over 7313.00 frames.], tot_loss[loss=0.1519, simple_loss=0.2423, pruned_loss=0.03079, over 1383274.17 frames.], batch size: 22, lr: 3.39e-04 +2022-05-28 00:29:26,189 INFO [train.py:823] (2/4) Epoch 50, batch 800, loss[loss=0.1317, simple_loss=0.2264, pruned_loss=0.01852, over 7097.00 frames.], tot_loss[loss=0.1514, simple_loss=0.2418, pruned_loss=0.03046, over 1390575.50 frames.], batch size: 19, lr: 3.39e-04 +2022-05-28 00:30:05,670 INFO [train.py:823] (2/4) Epoch 50, batch 850, loss[loss=0.1623, simple_loss=0.2573, pruned_loss=0.0336, over 4871.00 frames.], tot_loss[loss=0.1515, simple_loss=0.2418, pruned_loss=0.03062, over 1396563.15 frames.], batch size: 46, lr: 3.39e-04 +2022-05-28 00:30:45,825 INFO [train.py:823] (2/4) Epoch 50, batch 900, loss[loss=0.1604, simple_loss=0.247, pruned_loss=0.03686, over 6332.00 frames.], tot_loss[loss=0.152, simple_loss=0.2426, pruned_loss=0.03075, over 1398816.82 frames.], batch size: 34, lr: 3.39e-04 +2022-05-28 00:31:24,785 INFO [train.py:1038] (2/4) Done!