diff --git "a/baseline/log/log-train-2022-05-27-13-57-22-3" "b/baseline/log/log-train-2022-05-27-13-57-22-3" new file mode 100644--- /dev/null +++ "b/baseline/log/log-train-2022-05-27-13-57-22-3" @@ -0,0 +1,982 @@ +2022-05-27 13:57:22,682 INFO [train.py:887] (3/4) Training started +2022-05-27 13:57:22,683 INFO [train.py:897] (3/4) Device: cuda:3 +2022-05-27 13:57:22,685 INFO [train.py:906] (3/4) {'best_train_loss': inf, 'best_valid_loss': inf, 'best_train_epoch': -1, 'best_valid_epoch': -1, 'batch_idx_train': 0, 'log_interval': 50, 'reset_interval': 200, 'valid_interval': 1600, 'feature_dim': 80, 'subsampling_factor': 4, 'encoder_dim': 512, 'nhead': 8, 'dim_feedforward': 2048, 'num_encoder_layers': 12, 'decoder_dim': 512, 'joiner_dim': 512, 'model_warm_step': 3000, 'env_info': {'k2-version': '1.13', 'k2-build-type': 'Release', 'k2-with-cuda': True, 'k2-git-sha1': 'f4fefe4882bc0ae59af951da3f47335d5495ef71', 'k2-git-date': 'Thu Feb 10 15:16:02 2022', 'lhotse-version': '1.1.0', 'torch-version': '1.10.0+cu102', 'torch-cuda-available': True, 'torch-cuda-version': '10.2', 'python-version': '3.8', 'icefall-git-branch': 'stateless6', 'icefall-git-sha1': '50641cd-dirty', 'icefall-git-date': 'Fri May 27 13:49:39 2022', 'icefall-path': '/ceph-data2/ly/open_source/vq2_icefall', 'k2-path': '/ceph-jb/yaozengwei/workspace/rnnt/k2/k2/python/k2/__init__.py', 'lhotse-path': '/ceph-ly/open-source/hubert/lhotse/lhotse/__init__.py', 'hostname': 'de-74279-k2-train-9-0425111216-65f66bdf4-bkrql', 'IP address': '10.177.77.9'}, 'enable_distiallation': False, 'distillation_layer': 5, 'num_codebooks': 16, 'world_size': 4, 'master_port': 12359, 'tensorboard': True, 'num_epochs': 50, 'start_epoch': 1, 'start_batch': 0, 'exp_dir': PosixPath('pruned_transducer_stateless6/exp'), 'bpe_model': 'data/lang_bpe_500/bpe.model', 'initial_lr': 0.003, 'lr_batches': 5000, 'lr_epochs': 6, 'context_size': 2, 'prune_range': 5, 'lm_scale': 0.25, 'am_scale': 0.0, 'simple_loss_scale': 0.5, 'codebook_loss_scale': 0.1, 'seed': 42, 'print_diagnostics': False, 'save_every_n': 8000, 'keep_last_k': 20, 'average_period': 100, 'use_fp16': False, 'full_libri': False, 'manifest_dir': PosixPath('data/vq_fbank'), 'max_duration': 300, 'bucketing_sampler': True, 'num_buckets': 30, 'concatenate_cuts': False, 'duration_factor': 1.0, 'gap': 1.0, 'on_the_fly_feats': False, 'shuffle': True, 'drop_last': True, 'return_cuts': True, 'num_workers': 2, 'enable_spec_aug': True, 'spec_aug_time_warp_factor': -1, 'enable_musan': True, 'input_strategy': 'PrecomputedFeatures', 'blank_id': 0, 'vocab_size': 500} +2022-05-27 13:57:22,685 INFO [train.py:908] (3/4) About to create model +2022-05-27 13:57:23,152 INFO [train.py:912] (3/4) Number of model parameters: 78648040 +2022-05-27 13:57:28,832 INFO [train.py:927] (3/4) Using DDP +2022-05-27 13:57:29,321 INFO [asr_datamodule.py:408] (3/4) About to get train-clean-100 cuts +2022-05-27 13:57:40,319 INFO [asr_datamodule.py:225] (3/4) Enable MUSAN +2022-05-27 13:57:40,320 INFO [asr_datamodule.py:226] (3/4) About to get Musan cuts +2022-05-27 13:57:43,973 INFO [asr_datamodule.py:254] (3/4) Enable SpecAugment +2022-05-27 13:57:43,974 INFO [asr_datamodule.py:255] (3/4) Time warp factor: -1 +2022-05-27 13:57:43,974 INFO [asr_datamodule.py:267] (3/4) Num frame mask: 10 +2022-05-27 13:57:43,974 INFO [asr_datamodule.py:280] (3/4) About to create train dataset +2022-05-27 13:57:43,974 INFO [asr_datamodule.py:309] (3/4) Using BucketingSampler. 
+2022-05-27 13:57:44,405 INFO [asr_datamodule.py:325] (3/4) About to create train dataloader +2022-05-27 13:57:44,406 INFO [asr_datamodule.py:429] (3/4) About to get dev-clean cuts +2022-05-27 13:57:44,585 INFO [asr_datamodule.py:434] (3/4) About to get dev-other cuts +2022-05-27 13:57:44,746 INFO [asr_datamodule.py:356] (3/4) About to create dev dataset +2022-05-27 13:57:44,762 INFO [asr_datamodule.py:375] (3/4) About to create dev dataloader +2022-05-27 13:57:44,762 INFO [train.py:1054] (3/4) Sanity check -- see if any of the batches in epoch 1 would cause OOM. +2022-05-27 13:57:47,591 INFO [distributed.py:874] (3/4) Reducer buckets have been rebuilt in this iteration. +2022-05-27 13:58:01,568 INFO [train.py:823] (3/4) Epoch 1, batch 0, loss[loss=0.9621, simple_loss=1.924, pruned_loss=6.909, over 7286.00 frames.], tot_loss[loss=0.9621, simple_loss=1.924, pruned_loss=6.909, over 7286.00 frames.], batch size: 21, lr: 3.00e-03 +2022-05-27 13:58:40,757 INFO [train.py:823] (3/4) Epoch 1, batch 50, loss[loss=0.5468, simple_loss=1.094, pruned_loss=7.179, over 7161.00 frames.], tot_loss[loss=0.5769, simple_loss=1.154, pruned_loss=7.16, over 322028.89 frames.], batch size: 23, lr: 3.00e-03 +2022-05-27 13:59:20,074 INFO [train.py:823] (3/4) Epoch 1, batch 100, loss[loss=0.4426, simple_loss=0.8853, pruned_loss=6.979, over 7188.00 frames.], tot_loss[loss=0.5162, simple_loss=1.032, pruned_loss=7.074, over 564021.41 frames.], batch size: 20, lr: 3.00e-03 +2022-05-27 13:59:59,646 INFO [train.py:823] (3/4) Epoch 1, batch 150, loss[loss=0.4624, simple_loss=0.9248, pruned_loss=6.86, over 7338.00 frames.], tot_loss[loss=0.479, simple_loss=0.958, pruned_loss=6.974, over 754370.83 frames.], batch size: 23, lr: 3.00e-03 +2022-05-27 14:00:39,040 INFO [train.py:823] (3/4) Epoch 1, batch 200, loss[loss=0.3834, simple_loss=0.7669, pruned_loss=6.678, over 7300.00 frames.], tot_loss[loss=0.4585, simple_loss=0.9171, pruned_loss=6.905, over 903263.45 frames.], batch size: 19, lr: 3.00e-03 +2022-05-27 14:01:18,169 INFO [train.py:823] (3/4) Epoch 1, batch 250, loss[loss=0.3667, simple_loss=0.7333, pruned_loss=6.589, over 7287.00 frames.], tot_loss[loss=0.443, simple_loss=0.886, pruned_loss=6.828, over 1014421.72 frames.], batch size: 17, lr: 3.00e-03 +2022-05-27 14:01:57,502 INFO [train.py:823] (3/4) Epoch 1, batch 300, loss[loss=0.4433, simple_loss=0.8866, pruned_loss=6.828, over 7199.00 frames.], tot_loss[loss=0.4317, simple_loss=0.8633, pruned_loss=6.781, over 1106216.70 frames.], batch size: 24, lr: 3.00e-03 +2022-05-27 14:02:36,817 INFO [train.py:823] (3/4) Epoch 1, batch 350, loss[loss=0.4053, simple_loss=0.8107, pruned_loss=6.57, over 6592.00 frames.], tot_loss[loss=0.421, simple_loss=0.842, pruned_loss=6.747, over 1177131.05 frames.], batch size: 34, lr: 3.00e-03 +2022-05-27 14:03:16,138 INFO [train.py:823] (3/4) Epoch 1, batch 400, loss[loss=0.3731, simple_loss=0.7462, pruned_loss=6.651, over 5085.00 frames.], tot_loss[loss=0.4113, simple_loss=0.8226, pruned_loss=6.719, over 1228101.57 frames.], batch size: 46, lr: 3.00e-03 +2022-05-27 14:03:55,455 INFO [train.py:823] (3/4) Epoch 1, batch 450, loss[loss=0.3504, simple_loss=0.7009, pruned_loss=6.561, over 7193.00 frames.], tot_loss[loss=0.3982, simple_loss=0.7965, pruned_loss=6.701, over 1273912.37 frames.], batch size: 19, lr: 2.99e-03 +2022-05-27 14:04:34,523 INFO [train.py:823] (3/4) Epoch 1, batch 500, loss[loss=0.3249, simple_loss=0.6499, pruned_loss=6.626, over 7380.00 frames.], tot_loss[loss=0.3823, simple_loss=0.7647, pruned_loss=6.692, over 1308352.84 
frames.], batch size: 19, lr: 2.99e-03 +2022-05-27 14:05:13,702 INFO [train.py:823] (3/4) Epoch 1, batch 550, loss[loss=0.3231, simple_loss=0.6462, pruned_loss=6.843, over 7202.00 frames.], tot_loss[loss=0.3653, simple_loss=0.7307, pruned_loss=6.688, over 1329858.82 frames.], batch size: 25, lr: 2.99e-03 +2022-05-27 14:05:53,164 INFO [train.py:823] (3/4) Epoch 1, batch 600, loss[loss=0.2755, simple_loss=0.551, pruned_loss=6.715, over 7295.00 frames.], tot_loss[loss=0.3466, simple_loss=0.6933, pruned_loss=6.681, over 1346431.42 frames.], batch size: 19, lr: 2.99e-03 +2022-05-27 14:06:31,999 INFO [train.py:823] (3/4) Epoch 1, batch 650, loss[loss=0.2672, simple_loss=0.5343, pruned_loss=6.681, over 7097.00 frames.], tot_loss[loss=0.3312, simple_loss=0.6624, pruned_loss=6.679, over 1361092.04 frames.], batch size: 19, lr: 2.99e-03 +2022-05-27 14:07:11,386 INFO [train.py:823] (3/4) Epoch 1, batch 700, loss[loss=0.2526, simple_loss=0.5051, pruned_loss=6.591, over 7158.00 frames.], tot_loss[loss=0.3158, simple_loss=0.6315, pruned_loss=6.677, over 1373735.52 frames.], batch size: 17, lr: 2.99e-03 +2022-05-27 14:07:50,433 INFO [train.py:823] (3/4) Epoch 1, batch 750, loss[loss=0.2113, simple_loss=0.4226, pruned_loss=6.569, over 7252.00 frames.], tot_loss[loss=0.3033, simple_loss=0.6067, pruned_loss=6.688, over 1387143.31 frames.], batch size: 16, lr: 2.98e-03 +2022-05-27 14:08:29,975 INFO [train.py:823] (3/4) Epoch 1, batch 800, loss[loss=0.2447, simple_loss=0.4895, pruned_loss=6.805, over 7154.00 frames.], tot_loss[loss=0.2915, simple_loss=0.583, pruned_loss=6.693, over 1393311.31 frames.], batch size: 23, lr: 2.98e-03 +2022-05-27 14:09:08,835 INFO [train.py:823] (3/4) Epoch 1, batch 850, loss[loss=0.211, simple_loss=0.422, pruned_loss=6.593, over 7008.00 frames.], tot_loss[loss=0.2814, simple_loss=0.5628, pruned_loss=6.7, over 1400289.56 frames.], batch size: 16, lr: 2.98e-03 +2022-05-27 14:09:47,678 INFO [train.py:823] (3/4) Epoch 1, batch 900, loss[loss=0.2124, simple_loss=0.4248, pruned_loss=6.605, over 7288.00 frames.], tot_loss[loss=0.2712, simple_loss=0.5423, pruned_loss=6.705, over 1403742.06 frames.], batch size: 17, lr: 2.98e-03 +2022-05-27 14:10:41,069 INFO [train.py:823] (3/4) Epoch 2, batch 0, loss[loss=0.2109, simple_loss=0.4218, pruned_loss=6.69, over 7099.00 frames.], tot_loss[loss=0.2109, simple_loss=0.4218, pruned_loss=6.69, over 7099.00 frames.], batch size: 19, lr: 2.95e-03 +2022-05-27 14:11:20,546 INFO [train.py:823] (3/4) Epoch 2, batch 50, loss[loss=0.224, simple_loss=0.4479, pruned_loss=6.749, over 7378.00 frames.], tot_loss[loss=0.2304, simple_loss=0.4609, pruned_loss=6.726, over 322848.62 frames.], batch size: 21, lr: 2.95e-03 +2022-05-27 14:11:59,989 INFO [train.py:823] (3/4) Epoch 2, batch 100, loss[loss=0.2391, simple_loss=0.4782, pruned_loss=6.804, over 6986.00 frames.], tot_loss[loss=0.2269, simple_loss=0.4537, pruned_loss=6.728, over 564403.17 frames.], batch size: 26, lr: 2.95e-03 +2022-05-27 14:12:39,339 INFO [train.py:823] (3/4) Epoch 2, batch 150, loss[loss=0.1889, simple_loss=0.3778, pruned_loss=6.659, over 7296.00 frames.], tot_loss[loss=0.2228, simple_loss=0.4456, pruned_loss=6.727, over 758504.82 frames.], batch size: 17, lr: 2.94e-03 +2022-05-27 14:13:18,839 INFO [train.py:823] (3/4) Epoch 2, batch 200, loss[loss=0.2082, simple_loss=0.4164, pruned_loss=6.822, over 7103.00 frames.], tot_loss[loss=0.2206, simple_loss=0.4412, pruned_loss=6.731, over 906625.71 frames.], batch size: 18, lr: 2.94e-03 +2022-05-27 14:13:58,069 INFO [train.py:823] (3/4) Epoch 2, 
batch 250, loss[loss=0.2041, simple_loss=0.4082, pruned_loss=6.581, over 7157.00 frames.], tot_loss[loss=0.2181, simple_loss=0.4362, pruned_loss=6.727, over 1017913.31 frames.], batch size: 17, lr: 2.94e-03 +2022-05-27 14:14:37,456 INFO [train.py:823] (3/4) Epoch 2, batch 300, loss[loss=0.1857, simple_loss=0.3714, pruned_loss=6.597, over 7013.00 frames.], tot_loss[loss=0.2169, simple_loss=0.4338, pruned_loss=6.733, over 1108790.99 frames.], batch size: 16, lr: 2.93e-03 +2022-05-27 14:15:20,545 INFO [train.py:823] (3/4) Epoch 2, batch 350, loss[loss=0.2378, simple_loss=0.4755, pruned_loss=6.809, over 7137.00 frames.], tot_loss[loss=0.2157, simple_loss=0.4315, pruned_loss=6.74, over 1176034.05 frames.], batch size: 23, lr: 2.93e-03 +2022-05-27 14:15:59,789 INFO [train.py:823] (3/4) Epoch 2, batch 400, loss[loss=0.2144, simple_loss=0.4289, pruned_loss=6.766, over 7100.00 frames.], tot_loss[loss=0.2144, simple_loss=0.4288, pruned_loss=6.742, over 1224745.08 frames.], batch size: 18, lr: 2.93e-03 +2022-05-27 14:16:38,921 INFO [train.py:823] (3/4) Epoch 2, batch 450, loss[loss=0.2077, simple_loss=0.4154, pruned_loss=6.783, over 7288.00 frames.], tot_loss[loss=0.2122, simple_loss=0.4244, pruned_loss=6.747, over 1265761.32 frames.], batch size: 21, lr: 2.92e-03 +2022-05-27 14:17:18,187 INFO [train.py:823] (3/4) Epoch 2, batch 500, loss[loss=0.2197, simple_loss=0.4393, pruned_loss=6.833, over 6907.00 frames.], tot_loss[loss=0.2105, simple_loss=0.421, pruned_loss=6.751, over 1302081.72 frames.], batch size: 29, lr: 2.92e-03 +2022-05-27 14:17:57,128 INFO [train.py:823] (3/4) Epoch 2, batch 550, loss[loss=0.2087, simple_loss=0.4174, pruned_loss=6.742, over 5203.00 frames.], tot_loss[loss=0.2088, simple_loss=0.4175, pruned_loss=6.751, over 1324000.87 frames.], batch size: 46, lr: 2.92e-03 +2022-05-27 14:18:36,763 INFO [train.py:823] (3/4) Epoch 2, batch 600, loss[loss=0.2219, simple_loss=0.4438, pruned_loss=6.849, over 7284.00 frames.], tot_loss[loss=0.2085, simple_loss=0.417, pruned_loss=6.755, over 1341074.43 frames.], batch size: 21, lr: 2.91e-03 +2022-05-27 14:19:16,317 INFO [train.py:823] (3/4) Epoch 2, batch 650, loss[loss=0.2214, simple_loss=0.4429, pruned_loss=6.825, over 7290.00 frames.], tot_loss[loss=0.2072, simple_loss=0.4144, pruned_loss=6.764, over 1358983.62 frames.], batch size: 22, lr: 2.91e-03 +2022-05-27 14:19:56,941 INFO [train.py:823] (3/4) Epoch 2, batch 700, loss[loss=0.1902, simple_loss=0.3804, pruned_loss=6.715, over 7026.00 frames.], tot_loss[loss=0.2052, simple_loss=0.4104, pruned_loss=6.763, over 1374884.45 frames.], batch size: 17, lr: 2.90e-03 +2022-05-27 14:20:36,689 INFO [train.py:823] (3/4) Epoch 2, batch 750, loss[loss=0.1973, simple_loss=0.3946, pruned_loss=6.832, over 7459.00 frames.], tot_loss[loss=0.2034, simple_loss=0.4069, pruned_loss=6.764, over 1381415.13 frames.], batch size: 21, lr: 2.90e-03 +2022-05-27 14:21:16,516 INFO [train.py:823] (3/4) Epoch 2, batch 800, loss[loss=0.1979, simple_loss=0.3958, pruned_loss=6.816, over 4799.00 frames.], tot_loss[loss=0.2024, simple_loss=0.4048, pruned_loss=6.77, over 1386790.36 frames.], batch size: 47, lr: 2.89e-03 +2022-05-27 14:21:57,209 INFO [train.py:823] (3/4) Epoch 2, batch 850, loss[loss=0.1895, simple_loss=0.379, pruned_loss=6.753, over 7188.00 frames.], tot_loss[loss=0.2012, simple_loss=0.4025, pruned_loss=6.77, over 1390606.15 frames.], batch size: 20, lr: 2.89e-03 +2022-05-27 14:22:36,504 INFO [train.py:823] (3/4) Epoch 2, batch 900, loss[loss=0.1698, simple_loss=0.3397, pruned_loss=6.644, over 7319.00 
frames.], tot_loss[loss=0.1996, simple_loss=0.3991, pruned_loss=6.773, over 1395070.64 frames.], batch size: 18, lr: 2.89e-03 +2022-05-27 14:23:29,966 INFO [train.py:823] (3/4) Epoch 3, batch 0, loss[loss=0.1669, simple_loss=0.3338, pruned_loss=6.636, over 7307.00 frames.], tot_loss[loss=0.1669, simple_loss=0.3338, pruned_loss=6.636, over 7307.00 frames.], batch size: 17, lr: 2.83e-03 +2022-05-27 14:24:09,550 INFO [train.py:823] (3/4) Epoch 3, batch 50, loss[loss=0.2017, simple_loss=0.4033, pruned_loss=6.775, over 4808.00 frames.], tot_loss[loss=0.1885, simple_loss=0.377, pruned_loss=6.756, over 319508.92 frames.], batch size: 46, lr: 2.82e-03 +2022-05-27 14:24:48,976 INFO [train.py:823] (3/4) Epoch 3, batch 100, loss[loss=0.1881, simple_loss=0.3763, pruned_loss=6.824, over 7003.00 frames.], tot_loss[loss=0.1899, simple_loss=0.3799, pruned_loss=6.764, over 565311.34 frames.], batch size: 26, lr: 2.82e-03 +2022-05-27 14:25:28,633 INFO [train.py:823] (3/4) Epoch 3, batch 150, loss[loss=0.2051, simple_loss=0.4101, pruned_loss=6.791, over 7376.00 frames.], tot_loss[loss=0.1887, simple_loss=0.3773, pruned_loss=6.767, over 756448.47 frames.], batch size: 20, lr: 2.81e-03 +2022-05-27 14:26:08,205 INFO [train.py:823] (3/4) Epoch 3, batch 200, loss[loss=0.1993, simple_loss=0.3985, pruned_loss=6.793, over 7099.00 frames.], tot_loss[loss=0.1881, simple_loss=0.3763, pruned_loss=6.769, over 907484.79 frames.], batch size: 20, lr: 2.81e-03 +2022-05-27 14:26:47,700 INFO [train.py:823] (3/4) Epoch 3, batch 250, loss[loss=0.1751, simple_loss=0.3502, pruned_loss=6.818, over 7034.00 frames.], tot_loss[loss=0.1874, simple_loss=0.3748, pruned_loss=6.774, over 1025930.32 frames.], batch size: 26, lr: 2.80e-03 +2022-05-27 14:27:26,961 INFO [train.py:823] (3/4) Epoch 3, batch 300, loss[loss=0.1728, simple_loss=0.3456, pruned_loss=6.723, over 7394.00 frames.], tot_loss[loss=0.1874, simple_loss=0.3748, pruned_loss=6.783, over 1115552.34 frames.], batch size: 19, lr: 2.80e-03 +2022-05-27 14:28:06,714 INFO [train.py:823] (3/4) Epoch 3, batch 350, loss[loss=0.1919, simple_loss=0.3838, pruned_loss=6.872, over 7346.00 frames.], tot_loss[loss=0.1871, simple_loss=0.3741, pruned_loss=6.791, over 1186029.68 frames.], batch size: 23, lr: 2.79e-03 +2022-05-27 14:28:45,750 INFO [train.py:823] (3/4) Epoch 3, batch 400, loss[loss=0.1652, simple_loss=0.3304, pruned_loss=6.678, over 7299.00 frames.], tot_loss[loss=0.1866, simple_loss=0.3732, pruned_loss=6.788, over 1238992.70 frames.], batch size: 18, lr: 2.79e-03 +2022-05-27 14:29:24,544 INFO [train.py:823] (3/4) Epoch 3, batch 450, loss[loss=0.167, simple_loss=0.334, pruned_loss=6.675, over 7184.00 frames.], tot_loss[loss=0.1863, simple_loss=0.3726, pruned_loss=6.789, over 1272241.84 frames.], batch size: 18, lr: 2.78e-03 +2022-05-27 14:30:03,751 INFO [train.py:823] (3/4) Epoch 3, batch 500, loss[loss=0.1567, simple_loss=0.3134, pruned_loss=6.698, over 7301.00 frames.], tot_loss[loss=0.1852, simple_loss=0.3705, pruned_loss=6.791, over 1304119.04 frames.], batch size: 18, lr: 2.77e-03 +2022-05-27 14:30:42,946 INFO [train.py:823] (3/4) Epoch 3, batch 550, loss[loss=0.1822, simple_loss=0.3643, pruned_loss=6.783, over 7180.00 frames.], tot_loss[loss=0.1858, simple_loss=0.3716, pruned_loss=6.796, over 1332772.79 frames.], batch size: 21, lr: 2.77e-03 +2022-05-27 14:31:21,927 INFO [train.py:823] (3/4) Epoch 3, batch 600, loss[loss=0.1884, simple_loss=0.3769, pruned_loss=6.829, over 7381.00 frames.], tot_loss[loss=0.1848, simple_loss=0.3696, pruned_loss=6.787, over 1345669.07 frames.], 
batch size: 20, lr: 2.76e-03 +2022-05-27 14:32:01,097 INFO [train.py:823] (3/4) Epoch 3, batch 650, loss[loss=0.2081, simple_loss=0.4162, pruned_loss=6.846, over 5170.00 frames.], tot_loss[loss=0.1847, simple_loss=0.3694, pruned_loss=6.796, over 1362793.28 frames.], batch size: 48, lr: 2.76e-03 +2022-05-27 14:32:40,555 INFO [train.py:823] (3/4) Epoch 3, batch 700, loss[loss=0.2009, simple_loss=0.4018, pruned_loss=6.907, over 7291.00 frames.], tot_loss[loss=0.1838, simple_loss=0.3675, pruned_loss=6.796, over 1375342.83 frames.], batch size: 22, lr: 2.75e-03 +2022-05-27 14:33:19,794 INFO [train.py:823] (3/4) Epoch 3, batch 750, loss[loss=0.1803, simple_loss=0.3606, pruned_loss=6.829, over 7220.00 frames.], tot_loss[loss=0.183, simple_loss=0.3661, pruned_loss=6.798, over 1383430.35 frames.], batch size: 19, lr: 2.75e-03 +2022-05-27 14:33:58,495 INFO [train.py:823] (3/4) Epoch 3, batch 800, loss[loss=0.2023, simple_loss=0.4046, pruned_loss=6.893, over 7419.00 frames.], tot_loss[loss=0.1826, simple_loss=0.3652, pruned_loss=6.794, over 1393435.05 frames.], batch size: 22, lr: 2.74e-03 +2022-05-27 14:34:38,109 INFO [train.py:823] (3/4) Epoch 3, batch 850, loss[loss=0.1763, simple_loss=0.3526, pruned_loss=6.714, over 7094.00 frames.], tot_loss[loss=0.1822, simple_loss=0.3643, pruned_loss=6.793, over 1395895.99 frames.], batch size: 19, lr: 2.74e-03 +2022-05-27 14:35:16,875 INFO [train.py:823] (3/4) Epoch 3, batch 900, loss[loss=0.1762, simple_loss=0.3524, pruned_loss=6.802, over 5249.00 frames.], tot_loss[loss=0.1822, simple_loss=0.3644, pruned_loss=6.796, over 1393089.78 frames.], batch size: 47, lr: 2.73e-03 +2022-05-27 14:36:10,876 INFO [train.py:823] (3/4) Epoch 4, batch 0, loss[loss=0.1603, simple_loss=0.3207, pruned_loss=6.747, over 7100.00 frames.], tot_loss[loss=0.1603, simple_loss=0.3207, pruned_loss=6.747, over 7100.00 frames.], batch size: 19, lr: 2.64e-03 +2022-05-27 14:36:49,925 INFO [train.py:823] (3/4) Epoch 4, batch 50, loss[loss=0.1492, simple_loss=0.2983, pruned_loss=6.693, over 7019.00 frames.], tot_loss[loss=0.1701, simple_loss=0.3403, pruned_loss=6.783, over 319738.31 frames.], batch size: 17, lr: 2.64e-03 +2022-05-27 14:37:30,247 INFO [train.py:823] (3/4) Epoch 4, batch 100, loss[loss=0.1962, simple_loss=0.3924, pruned_loss=6.886, over 7378.00 frames.], tot_loss[loss=0.1729, simple_loss=0.3458, pruned_loss=6.791, over 565049.83 frames.], batch size: 21, lr: 2.63e-03 +2022-05-27 14:38:10,813 INFO [train.py:823] (3/4) Epoch 4, batch 150, loss[loss=0.1894, simple_loss=0.3788, pruned_loss=6.777, over 7163.00 frames.], tot_loss[loss=0.1744, simple_loss=0.3488, pruned_loss=6.799, over 751625.30 frames.], batch size: 17, lr: 2.63e-03 +2022-05-27 14:38:51,376 INFO [train.py:823] (3/4) Epoch 4, batch 200, loss[loss=0.2514, simple_loss=0.3163, pruned_loss=0.9325, over 7186.00 frames.], tot_loss[loss=0.2715, simple_loss=0.3628, pruned_loss=4.919, over 903849.80 frames.], batch size: 18, lr: 2.62e-03 +2022-05-27 14:39:31,807 INFO [train.py:823] (3/4) Epoch 4, batch 250, loss[loss=0.2647, simple_loss=0.379, pruned_loss=0.7516, over 7376.00 frames.], tot_loss[loss=0.2697, simple_loss=0.3604, pruned_loss=3.659, over 1022500.21 frames.], batch size: 21, lr: 2.62e-03 +2022-05-27 14:40:11,126 INFO [train.py:823] (3/4) Epoch 4, batch 300, loss[loss=0.2356, simple_loss=0.3772, pruned_loss=0.4705, over 7197.00 frames.], tot_loss[loss=0.26, simple_loss=0.359, pruned_loss=2.79, over 1108406.27 frames.], batch size: 20, lr: 2.61e-03 +2022-05-27 14:40:49,909 INFO [train.py:823] (3/4) Epoch 4, batch 
350, loss[loss=0.2153, simple_loss=0.3654, pruned_loss=0.3255, over 7146.00 frames.], tot_loss[loss=0.248, simple_loss=0.3575, pruned_loss=2.153, over 1172561.77 frames.], batch size: 23, lr: 2.60e-03 +2022-05-27 14:41:29,693 INFO [train.py:823] (3/4) Epoch 4, batch 400, loss[loss=0.2113, simple_loss=0.3684, pruned_loss=0.2712, over 7213.00 frames.], tot_loss[loss=0.2369, simple_loss=0.356, pruned_loss=1.675, over 1225880.67 frames.], batch size: 25, lr: 2.60e-03 +2022-05-27 14:42:08,265 INFO [train.py:823] (3/4) Epoch 4, batch 450, loss[loss=0.1699, simple_loss=0.3048, pruned_loss=0.1753, over 7151.00 frames.], tot_loss[loss=0.2273, simple_loss=0.3543, pruned_loss=1.319, over 1268208.90 frames.], batch size: 17, lr: 2.59e-03 +2022-05-27 14:42:47,512 INFO [train.py:823] (3/4) Epoch 4, batch 500, loss[loss=0.1996, simple_loss=0.358, pruned_loss=0.2061, over 7239.00 frames.], tot_loss[loss=0.2197, simple_loss=0.3533, pruned_loss=1.05, over 1304437.58 frames.], batch size: 25, lr: 2.59e-03 +2022-05-27 14:43:26,680 INFO [train.py:823] (3/4) Epoch 4, batch 550, loss[loss=0.179, simple_loss=0.3242, pruned_loss=0.1691, over 7393.00 frames.], tot_loss[loss=0.213, simple_loss=0.351, pruned_loss=0.8467, over 1331958.26 frames.], batch size: 19, lr: 2.58e-03 +2022-05-27 14:44:06,026 INFO [train.py:823] (3/4) Epoch 4, batch 600, loss[loss=0.2051, simple_loss=0.3713, pruned_loss=0.1948, over 7181.00 frames.], tot_loss[loss=0.2082, simple_loss=0.3503, pruned_loss=0.6915, over 1354207.18 frames.], batch size: 21, lr: 2.57e-03 +2022-05-27 14:44:44,733 INFO [train.py:823] (3/4) Epoch 4, batch 650, loss[loss=0.185, simple_loss=0.334, pruned_loss=0.1807, over 7370.00 frames.], tot_loss[loss=0.2035, simple_loss=0.3481, pruned_loss=0.5723, over 1369919.11 frames.], batch size: 20, lr: 2.57e-03 +2022-05-27 14:45:23,911 INFO [train.py:823] (3/4) Epoch 4, batch 700, loss[loss=0.2007, simple_loss=0.3621, pruned_loss=0.196, over 4621.00 frames.], tot_loss[loss=0.2021, simple_loss=0.3504, pruned_loss=0.4846, over 1375803.70 frames.], batch size: 47, lr: 2.56e-03 +2022-05-27 14:46:02,680 INFO [train.py:823] (3/4) Epoch 4, batch 750, loss[loss=0.176, simple_loss=0.3234, pruned_loss=0.1429, over 7104.00 frames.], tot_loss[loss=0.1994, simple_loss=0.3494, pruned_loss=0.4132, over 1384215.45 frames.], batch size: 19, lr: 2.56e-03 +2022-05-27 14:46:42,067 INFO [train.py:823] (3/4) Epoch 4, batch 800, loss[loss=0.1784, simple_loss=0.3272, pruned_loss=0.1487, over 7032.00 frames.], tot_loss[loss=0.1971, simple_loss=0.3485, pruned_loss=0.3577, over 1385858.75 frames.], batch size: 17, lr: 2.55e-03 +2022-05-27 14:47:21,034 INFO [train.py:823] (3/4) Epoch 4, batch 850, loss[loss=0.1703, simple_loss=0.3165, pruned_loss=0.121, over 7300.00 frames.], tot_loss[loss=0.1952, simple_loss=0.3478, pruned_loss=0.3129, over 1391874.54 frames.], batch size: 22, lr: 2.54e-03 +2022-05-27 14:47:59,974 INFO [train.py:823] (3/4) Epoch 4, batch 900, loss[loss=0.1634, simple_loss=0.305, pruned_loss=0.1091, over 7188.00 frames.], tot_loss[loss=0.1928, simple_loss=0.3457, pruned_loss=0.2776, over 1388604.74 frames.], batch size: 18, lr: 2.54e-03 +2022-05-27 14:48:51,285 INFO [train.py:823] (3/4) Epoch 5, batch 0, loss[loss=0.1915, simple_loss=0.3536, pruned_loss=0.1472, over 7343.00 frames.], tot_loss[loss=0.1915, simple_loss=0.3536, pruned_loss=0.1472, over 7343.00 frames.], batch size: 23, lr: 2.44e-03 +2022-05-27 14:49:30,569 INFO [train.py:823] (3/4) Epoch 5, batch 50, loss[loss=0.186, simple_loss=0.3397, pruned_loss=0.1614, over 6987.00 
frames.], tot_loss[loss=0.1836, simple_loss=0.3376, pruned_loss=0.1476, over 326408.56 frames.], batch size: 26, lr: 2.44e-03 +2022-05-27 14:50:10,139 INFO [train.py:823] (3/4) Epoch 5, batch 100, loss[loss=0.1781, simple_loss=0.3301, pruned_loss=0.1305, over 7114.00 frames.], tot_loss[loss=0.182, simple_loss=0.3354, pruned_loss=0.1431, over 570754.53 frames.], batch size: 20, lr: 2.43e-03 +2022-05-27 14:50:49,501 INFO [train.py:823] (3/4) Epoch 5, batch 150, loss[loss=0.1866, simple_loss=0.3428, pruned_loss=0.1517, over 7372.00 frames.], tot_loss[loss=0.1813, simple_loss=0.3341, pruned_loss=0.1428, over 758784.07 frames.], batch size: 20, lr: 2.42e-03 +2022-05-27 14:51:28,546 INFO [train.py:823] (3/4) Epoch 5, batch 200, loss[loss=0.2044, simple_loss=0.3784, pruned_loss=0.1526, over 7190.00 frames.], tot_loss[loss=0.182, simple_loss=0.3355, pruned_loss=0.1421, over 905035.96 frames.], batch size: 22, lr: 2.42e-03 +2022-05-27 14:52:07,912 INFO [train.py:823] (3/4) Epoch 5, batch 250, loss[loss=0.1722, simple_loss=0.3208, pruned_loss=0.1179, over 4485.00 frames.], tot_loss[loss=0.1824, simple_loss=0.3361, pruned_loss=0.1438, over 1012272.04 frames.], batch size: 47, lr: 2.41e-03 +2022-05-27 14:52:46,772 INFO [train.py:823] (3/4) Epoch 5, batch 300, loss[loss=0.1894, simple_loss=0.3511, pruned_loss=0.1386, over 7148.00 frames.], tot_loss[loss=0.1835, simple_loss=0.3381, pruned_loss=0.1438, over 1103412.50 frames.], batch size: 23, lr: 2.41e-03 +2022-05-27 14:53:26,258 INFO [train.py:823] (3/4) Epoch 5, batch 350, loss[loss=0.2023, simple_loss=0.3742, pruned_loss=0.1526, over 7237.00 frames.], tot_loss[loss=0.1831, simple_loss=0.3378, pruned_loss=0.1424, over 1173855.80 frames.], batch size: 24, lr: 2.40e-03 +2022-05-27 14:54:05,607 INFO [train.py:823] (3/4) Epoch 5, batch 400, loss[loss=0.1607, simple_loss=0.2981, pruned_loss=0.1165, over 7441.00 frames.], tot_loss[loss=0.1823, simple_loss=0.3366, pruned_loss=0.1403, over 1233372.86 frames.], batch size: 18, lr: 2.39e-03 +2022-05-27 14:54:45,111 INFO [train.py:823] (3/4) Epoch 5, batch 450, loss[loss=0.1796, simple_loss=0.3347, pruned_loss=0.1222, over 7041.00 frames.], tot_loss[loss=0.1822, simple_loss=0.3366, pruned_loss=0.1392, over 1268872.25 frames.], batch size: 26, lr: 2.39e-03 +2022-05-27 14:55:24,661 INFO [train.py:823] (3/4) Epoch 5, batch 500, loss[loss=0.1547, simple_loss=0.2906, pruned_loss=0.09402, over 7187.00 frames.], tot_loss[loss=0.1807, simple_loss=0.3343, pruned_loss=0.1357, over 1303888.56 frames.], batch size: 19, lr: 2.38e-03 +2022-05-27 14:56:03,771 INFO [train.py:823] (3/4) Epoch 5, batch 550, loss[loss=0.1939, simple_loss=0.3583, pruned_loss=0.1474, over 6938.00 frames.], tot_loss[loss=0.1796, simple_loss=0.3325, pruned_loss=0.1333, over 1329293.81 frames.], batch size: 29, lr: 2.38e-03 +2022-05-27 14:56:42,909 INFO [train.py:823] (3/4) Epoch 5, batch 600, loss[loss=0.1854, simple_loss=0.3465, pruned_loss=0.1219, over 6534.00 frames.], tot_loss[loss=0.1801, simple_loss=0.3334, pruned_loss=0.134, over 1348238.86 frames.], batch size: 34, lr: 2.37e-03 +2022-05-27 14:57:22,163 INFO [train.py:823] (3/4) Epoch 5, batch 650, loss[loss=0.1815, simple_loss=0.3405, pruned_loss=0.1128, over 7287.00 frames.], tot_loss[loss=0.1793, simple_loss=0.3322, pruned_loss=0.1322, over 1363649.51 frames.], batch size: 21, lr: 2.37e-03 +2022-05-27 14:58:00,839 INFO [train.py:823] (3/4) Epoch 5, batch 700, loss[loss=0.1986, simple_loss=0.3629, pruned_loss=0.1714, over 7008.00 frames.], tot_loss[loss=0.1791, simple_loss=0.332, 
pruned_loss=0.1312, over 1372859.12 frames.], batch size: 26, lr: 2.36e-03 +2022-05-27 14:58:39,881 INFO [train.py:823] (3/4) Epoch 5, batch 750, loss[loss=0.1971, simple_loss=0.3626, pruned_loss=0.1579, over 7144.00 frames.], tot_loss[loss=0.1796, simple_loss=0.333, pruned_loss=0.1308, over 1381295.10 frames.], batch size: 23, lr: 2.35e-03 +2022-05-27 14:59:18,661 INFO [train.py:823] (3/4) Epoch 5, batch 800, loss[loss=0.1956, simple_loss=0.3597, pruned_loss=0.1573, over 4870.00 frames.], tot_loss[loss=0.1789, simple_loss=0.3318, pruned_loss=0.1296, over 1391093.48 frames.], batch size: 47, lr: 2.35e-03 +2022-05-27 14:59:59,085 INFO [train.py:823] (3/4) Epoch 5, batch 850, loss[loss=0.1557, simple_loss=0.2928, pruned_loss=0.0926, over 7144.00 frames.], tot_loss[loss=0.1783, simple_loss=0.3309, pruned_loss=0.1286, over 1396734.11 frames.], batch size: 17, lr: 2.34e-03 +2022-05-27 15:00:37,907 INFO [train.py:823] (3/4) Epoch 5, batch 900, loss[loss=0.1871, simple_loss=0.3482, pruned_loss=0.13, over 6950.00 frames.], tot_loss[loss=0.178, simple_loss=0.3304, pruned_loss=0.1276, over 1399130.57 frames.], batch size: 29, lr: 2.34e-03 +2022-05-27 15:01:33,786 INFO [train.py:823] (3/4) Epoch 6, batch 0, loss[loss=0.1904, simple_loss=0.3527, pruned_loss=0.1402, over 7176.00 frames.], tot_loss[loss=0.1904, simple_loss=0.3527, pruned_loss=0.1402, over 7176.00 frames.], batch size: 22, lr: 2.24e-03 +2022-05-27 15:02:12,500 INFO [train.py:823] (3/4) Epoch 6, batch 50, loss[loss=0.1803, simple_loss=0.3374, pruned_loss=0.1157, over 7170.00 frames.], tot_loss[loss=0.1757, simple_loss=0.3278, pruned_loss=0.1182, over 318579.50 frames.], batch size: 21, lr: 2.23e-03 +2022-05-27 15:02:52,433 INFO [train.py:823] (3/4) Epoch 6, batch 100, loss[loss=0.1759, simple_loss=0.3279, pruned_loss=0.1198, over 7237.00 frames.], tot_loss[loss=0.1721, simple_loss=0.3213, pruned_loss=0.1142, over 565522.97 frames.], batch size: 24, lr: 2.23e-03 +2022-05-27 15:03:32,932 INFO [train.py:823] (3/4) Epoch 6, batch 150, loss[loss=0.1726, simple_loss=0.3238, pruned_loss=0.1071, over 7295.00 frames.], tot_loss[loss=0.1727, simple_loss=0.3224, pruned_loss=0.1149, over 754874.04 frames.], batch size: 19, lr: 2.22e-03 +2022-05-27 15:04:12,196 INFO [train.py:823] (3/4) Epoch 6, batch 200, loss[loss=0.1987, simple_loss=0.3647, pruned_loss=0.1633, over 7205.00 frames.], tot_loss[loss=0.1733, simple_loss=0.3233, pruned_loss=0.1165, over 900503.61 frames.], batch size: 25, lr: 2.22e-03 +2022-05-27 15:04:50,830 INFO [train.py:823] (3/4) Epoch 6, batch 250, loss[loss=0.1819, simple_loss=0.3359, pruned_loss=0.1394, over 6602.00 frames.], tot_loss[loss=0.1748, simple_loss=0.3258, pruned_loss=0.1185, over 1016955.04 frames.], batch size: 34, lr: 2.21e-03 +2022-05-27 15:05:29,770 INFO [train.py:823] (3/4) Epoch 6, batch 300, loss[loss=0.1888, simple_loss=0.3507, pruned_loss=0.1349, over 7201.00 frames.], tot_loss[loss=0.1742, simple_loss=0.325, pruned_loss=0.1167, over 1107411.56 frames.], batch size: 20, lr: 2.21e-03 +2022-05-27 15:06:08,859 INFO [train.py:823] (3/4) Epoch 6, batch 350, loss[loss=0.1635, simple_loss=0.3041, pruned_loss=0.1147, over 7091.00 frames.], tot_loss[loss=0.1738, simple_loss=0.3244, pruned_loss=0.116, over 1178301.96 frames.], batch size: 18, lr: 2.20e-03 +2022-05-27 15:06:48,079 INFO [train.py:823] (3/4) Epoch 6, batch 400, loss[loss=0.178, simple_loss=0.3305, pruned_loss=0.127, over 7177.00 frames.], tot_loss[loss=0.1724, simple_loss=0.322, pruned_loss=0.1141, over 1234911.80 frames.], batch size: 22, lr: 2.19e-03 
+2022-05-27 15:07:26,450 INFO [train.py:823] (3/4) Epoch 6, batch 450, loss[loss=0.1795, simple_loss=0.3355, pruned_loss=0.1172, over 6728.00 frames.], tot_loss[loss=0.172, simple_loss=0.3214, pruned_loss=0.1134, over 1267775.23 frames.], batch size: 34, lr: 2.19e-03 +2022-05-27 15:08:05,533 INFO [train.py:823] (3/4) Epoch 6, batch 500, loss[loss=0.201, simple_loss=0.3717, pruned_loss=0.1514, over 7146.00 frames.], tot_loss[loss=0.1722, simple_loss=0.3218, pruned_loss=0.1132, over 1298292.52 frames.], batch size: 23, lr: 2.18e-03 +2022-05-27 15:08:44,721 INFO [train.py:823] (3/4) Epoch 6, batch 550, loss[loss=0.1523, simple_loss=0.2887, pruned_loss=0.07916, over 7094.00 frames.], tot_loss[loss=0.1727, simple_loss=0.3227, pruned_loss=0.1134, over 1325533.12 frames.], batch size: 18, lr: 2.18e-03 +2022-05-27 15:09:24,184 INFO [train.py:823] (3/4) Epoch 6, batch 600, loss[loss=0.1747, simple_loss=0.3265, pruned_loss=0.1149, over 7094.00 frames.], tot_loss[loss=0.1729, simple_loss=0.3229, pruned_loss=0.1142, over 1343796.35 frames.], batch size: 18, lr: 2.17e-03 +2022-05-27 15:10:02,620 INFO [train.py:823] (3/4) Epoch 6, batch 650, loss[loss=0.156, simple_loss=0.2964, pruned_loss=0.0775, over 7384.00 frames.], tot_loss[loss=0.1721, simple_loss=0.3216, pruned_loss=0.1129, over 1361719.80 frames.], batch size: 19, lr: 2.17e-03 +2022-05-27 15:10:41,875 INFO [train.py:823] (3/4) Epoch 6, batch 700, loss[loss=0.1624, simple_loss=0.3059, pruned_loss=0.09451, over 7192.00 frames.], tot_loss[loss=0.1719, simple_loss=0.3214, pruned_loss=0.1121, over 1376338.36 frames.], batch size: 19, lr: 2.16e-03 +2022-05-27 15:11:21,026 INFO [train.py:823] (3/4) Epoch 6, batch 750, loss[loss=0.1594, simple_loss=0.2977, pruned_loss=0.1058, over 7096.00 frames.], tot_loss[loss=0.1723, simple_loss=0.3221, pruned_loss=0.1124, over 1384482.76 frames.], batch size: 19, lr: 2.16e-03 +2022-05-27 15:12:00,594 INFO [train.py:823] (3/4) Epoch 6, batch 800, loss[loss=0.1544, simple_loss=0.2893, pruned_loss=0.0977, over 7013.00 frames.], tot_loss[loss=0.1713, simple_loss=0.3205, pruned_loss=0.1108, over 1390764.59 frames.], batch size: 16, lr: 2.15e-03 +2022-05-27 15:12:39,820 INFO [train.py:823] (3/4) Epoch 6, batch 850, loss[loss=0.1655, simple_loss=0.3091, pruned_loss=0.1093, over 7215.00 frames.], tot_loss[loss=0.1706, simple_loss=0.3193, pruned_loss=0.1101, over 1394444.71 frames.], batch size: 16, lr: 2.15e-03 +2022-05-27 15:13:19,387 INFO [train.py:823] (3/4) Epoch 6, batch 900, loss[loss=0.1607, simple_loss=0.3015, pruned_loss=0.09929, over 7298.00 frames.], tot_loss[loss=0.1702, simple_loss=0.3185, pruned_loss=0.1094, over 1397524.58 frames.], batch size: 17, lr: 2.14e-03 +2022-05-27 15:14:12,733 INFO [train.py:823] (3/4) Epoch 7, batch 0, loss[loss=0.1461, simple_loss=0.2775, pruned_loss=0.07349, over 7106.00 frames.], tot_loss[loss=0.1461, simple_loss=0.2775, pruned_loss=0.07349, over 7106.00 frames.], batch size: 19, lr: 2.05e-03 +2022-05-27 15:14:52,663 INFO [train.py:823] (3/4) Epoch 7, batch 50, loss[loss=0.1494, simple_loss=0.2777, pruned_loss=0.1056, over 7264.00 frames.], tot_loss[loss=0.1645, simple_loss=0.3091, pruned_loss=0.09943, over 322826.00 frames.], batch size: 16, lr: 2.04e-03 +2022-05-27 15:15:31,862 INFO [train.py:823] (3/4) Epoch 7, batch 100, loss[loss=0.1567, simple_loss=0.2963, pruned_loss=0.08543, over 7115.00 frames.], tot_loss[loss=0.1638, simple_loss=0.3076, pruned_loss=0.1, over 562833.18 frames.], batch size: 20, lr: 2.04e-03 +2022-05-27 15:16:10,851 INFO [train.py:823] (3/4) Epoch 7, 
batch 150, loss[loss=0.1538, simple_loss=0.2913, pruned_loss=0.08192, over 7369.00 frames.], tot_loss[loss=0.1653, simple_loss=0.3104, pruned_loss=0.1015, over 753380.26 frames.], batch size: 21, lr: 2.03e-03 +2022-05-27 15:16:50,107 INFO [train.py:823] (3/4) Epoch 7, batch 200, loss[loss=0.1688, simple_loss=0.3149, pruned_loss=0.1136, over 7017.00 frames.], tot_loss[loss=0.1663, simple_loss=0.312, pruned_loss=0.1033, over 904854.02 frames.], batch size: 26, lr: 2.03e-03 +2022-05-27 15:17:29,151 INFO [train.py:823] (3/4) Epoch 7, batch 250, loss[loss=0.1661, simple_loss=0.3118, pruned_loss=0.1019, over 7285.00 frames.], tot_loss[loss=0.1662, simple_loss=0.3118, pruned_loss=0.1026, over 1019747.30 frames.], batch size: 22, lr: 2.02e-03 +2022-05-27 15:18:07,974 INFO [train.py:823] (3/4) Epoch 7, batch 300, loss[loss=0.144, simple_loss=0.2731, pruned_loss=0.07455, over 7153.00 frames.], tot_loss[loss=0.1668, simple_loss=0.313, pruned_loss=0.1034, over 1108392.18 frames.], batch size: 17, lr: 2.02e-03 +2022-05-27 15:18:47,551 INFO [train.py:823] (3/4) Epoch 7, batch 350, loss[loss=0.2712, simple_loss=0.3278, pruned_loss=0.1073, over 7301.00 frames.], tot_loss[loss=0.1998, simple_loss=0.3174, pruned_loss=0.1094, over 1175452.57 frames.], batch size: 19, lr: 2.01e-03 +2022-05-27 15:19:26,446 INFO [train.py:823] (3/4) Epoch 7, batch 400, loss[loss=0.3418, simple_loss=0.3762, pruned_loss=0.1537, over 7342.00 frames.], tot_loss[loss=0.218, simple_loss=0.3187, pruned_loss=0.1095, over 1229721.06 frames.], batch size: 23, lr: 2.01e-03 +2022-05-27 15:20:05,983 INFO [train.py:823] (3/4) Epoch 7, batch 450, loss[loss=0.2527, simple_loss=0.3262, pruned_loss=0.0896, over 7176.00 frames.], tot_loss[loss=0.2303, simple_loss=0.3192, pruned_loss=0.109, over 1268036.93 frames.], batch size: 22, lr: 2.00e-03 +2022-05-27 15:20:45,040 INFO [train.py:823] (3/4) Epoch 7, batch 500, loss[loss=0.279, simple_loss=0.3309, pruned_loss=0.1136, over 6983.00 frames.], tot_loss[loss=0.2372, simple_loss=0.3187, pruned_loss=0.107, over 1302377.92 frames.], batch size: 26, lr: 2.00e-03 +2022-05-27 15:21:24,312 INFO [train.py:823] (3/4) Epoch 7, batch 550, loss[loss=0.2358, simple_loss=0.3074, pruned_loss=0.08215, over 6624.00 frames.], tot_loss[loss=0.2396, simple_loss=0.3162, pruned_loss=0.1037, over 1326561.35 frames.], batch size: 35, lr: 1.99e-03 +2022-05-27 15:22:03,263 INFO [train.py:823] (3/4) Epoch 7, batch 600, loss[loss=0.2541, simple_loss=0.3196, pruned_loss=0.09432, over 7375.00 frames.], tot_loss[loss=0.2449, simple_loss=0.3169, pruned_loss=0.1035, over 1344356.52 frames.], batch size: 21, lr: 1.99e-03 +2022-05-27 15:22:42,733 INFO [train.py:823] (3/4) Epoch 7, batch 650, loss[loss=0.2531, simple_loss=0.3252, pruned_loss=0.09045, over 7111.00 frames.], tot_loss[loss=0.2484, simple_loss=0.317, pruned_loss=0.103, over 1360638.16 frames.], batch size: 20, lr: 1.98e-03 +2022-05-27 15:23:24,573 INFO [train.py:823] (3/4) Epoch 7, batch 700, loss[loss=0.2594, simple_loss=0.3138, pruned_loss=0.1025, over 7102.00 frames.], tot_loss[loss=0.2517, simple_loss=0.318, pruned_loss=0.1029, over 1369876.03 frames.], batch size: 18, lr: 1.98e-03 +2022-05-27 15:24:03,844 INFO [train.py:823] (3/4) Epoch 7, batch 750, loss[loss=0.275, simple_loss=0.3292, pruned_loss=0.1104, over 6995.00 frames.], tot_loss[loss=0.2529, simple_loss=0.3176, pruned_loss=0.1019, over 1377658.05 frames.], batch size: 26, lr: 1.97e-03 +2022-05-27 15:24:43,866 INFO [train.py:823] (3/4) Epoch 7, batch 800, loss[loss=0.2436, simple_loss=0.303, 
pruned_loss=0.09214, over 7190.00 frames.], tot_loss[loss=0.2542, simple_loss=0.3181, pruned_loss=0.1012, over 1386934.11 frames.], batch size: 19, lr: 1.97e-03 +2022-05-27 15:25:23,397 INFO [train.py:823] (3/4) Epoch 7, batch 850, loss[loss=0.2969, simple_loss=0.355, pruned_loss=0.1194, over 7372.00 frames.], tot_loss[loss=0.2573, simple_loss=0.3202, pruned_loss=0.1019, over 1388010.37 frames.], batch size: 21, lr: 1.97e-03 +2022-05-27 15:26:02,048 INFO [train.py:823] (3/4) Epoch 7, batch 900, loss[loss=0.3062, simple_loss=0.3602, pruned_loss=0.1261, over 6913.00 frames.], tot_loss[loss=0.2577, simple_loss=0.3196, pruned_loss=0.1016, over 1390435.73 frames.], batch size: 29, lr: 1.96e-03 +2022-05-27 15:26:53,909 INFO [train.py:823] (3/4) Epoch 8, batch 0, loss[loss=0.2613, simple_loss=0.3197, pruned_loss=0.1014, over 7416.00 frames.], tot_loss[loss=0.2613, simple_loss=0.3197, pruned_loss=0.1014, over 7416.00 frames.], batch size: 22, lr: 1.88e-03 +2022-05-27 15:27:33,935 INFO [train.py:823] (3/4) Epoch 8, batch 50, loss[loss=0.322, simple_loss=0.3708, pruned_loss=0.1366, over 7238.00 frames.], tot_loss[loss=0.2503, simple_loss=0.3155, pruned_loss=0.09258, over 320843.21 frames.], batch size: 24, lr: 1.87e-03 +2022-05-27 15:28:13,342 INFO [train.py:823] (3/4) Epoch 8, batch 100, loss[loss=0.2287, simple_loss=0.2945, pruned_loss=0.0815, over 7030.00 frames.], tot_loss[loss=0.2518, simple_loss=0.3158, pruned_loss=0.09386, over 565330.38 frames.], batch size: 17, lr: 1.87e-03 +2022-05-27 15:28:52,057 INFO [train.py:823] (3/4) Epoch 8, batch 150, loss[loss=0.2573, simple_loss=0.3273, pruned_loss=0.09366, over 7277.00 frames.], tot_loss[loss=0.2518, simple_loss=0.3157, pruned_loss=0.09393, over 754033.82 frames.], batch size: 20, lr: 1.86e-03 +2022-05-27 15:29:31,415 INFO [train.py:823] (3/4) Epoch 8, batch 200, loss[loss=0.2605, simple_loss=0.3176, pruned_loss=0.1017, over 7009.00 frames.], tot_loss[loss=0.2482, simple_loss=0.3136, pruned_loss=0.0914, over 899741.48 frames.], batch size: 16, lr: 1.86e-03 +2022-05-27 15:30:10,538 INFO [train.py:823] (3/4) Epoch 8, batch 250, loss[loss=0.2506, simple_loss=0.3318, pruned_loss=0.08468, over 7153.00 frames.], tot_loss[loss=0.2454, simple_loss=0.311, pruned_loss=0.08988, over 1013712.26 frames.], batch size: 23, lr: 1.85e-03 +2022-05-27 15:30:49,871 INFO [train.py:823] (3/4) Epoch 8, batch 300, loss[loss=0.2358, simple_loss=0.3002, pruned_loss=0.08566, over 7387.00 frames.], tot_loss[loss=0.2458, simple_loss=0.311, pruned_loss=0.09026, over 1106028.30 frames.], batch size: 19, lr: 1.85e-03 +2022-05-27 15:31:28,805 INFO [train.py:823] (3/4) Epoch 8, batch 350, loss[loss=0.1961, simple_loss=0.2707, pruned_loss=0.06072, over 6997.00 frames.], tot_loss[loss=0.2464, simple_loss=0.3111, pruned_loss=0.09083, over 1166664.31 frames.], batch size: 16, lr: 1.85e-03 +2022-05-27 15:32:08,160 INFO [train.py:823] (3/4) Epoch 8, batch 400, loss[loss=0.3002, simple_loss=0.3563, pruned_loss=0.122, over 7172.00 frames.], tot_loss[loss=0.248, simple_loss=0.3126, pruned_loss=0.09165, over 1222656.52 frames.], batch size: 22, lr: 1.84e-03 +2022-05-27 15:32:47,410 INFO [train.py:823] (3/4) Epoch 8, batch 450, loss[loss=0.2189, simple_loss=0.3016, pruned_loss=0.06807, over 6566.00 frames.], tot_loss[loss=0.2458, simple_loss=0.3117, pruned_loss=0.08991, over 1266127.55 frames.], batch size: 34, lr: 1.84e-03 +2022-05-27 15:33:26,805 INFO [train.py:823] (3/4) Epoch 8, batch 500, loss[loss=0.1989, simple_loss=0.2654, pruned_loss=0.06619, over 7308.00 frames.], 
tot_loss[loss=0.2442, simple_loss=0.3106, pruned_loss=0.08885, over 1301651.53 frames.], batch size: 17, lr: 1.83e-03 +2022-05-27 15:34:05,464 INFO [train.py:823] (3/4) Epoch 8, batch 550, loss[loss=0.2963, simple_loss=0.3586, pruned_loss=0.117, over 7181.00 frames.], tot_loss[loss=0.2457, simple_loss=0.3127, pruned_loss=0.08938, over 1326271.33 frames.], batch size: 22, lr: 1.83e-03 +2022-05-27 15:34:44,945 INFO [train.py:823] (3/4) Epoch 8, batch 600, loss[loss=0.2597, simple_loss=0.3255, pruned_loss=0.09695, over 7003.00 frames.], tot_loss[loss=0.2461, simple_loss=0.3127, pruned_loss=0.08973, over 1343818.00 frames.], batch size: 17, lr: 1.82e-03 +2022-05-27 15:35:24,056 INFO [train.py:823] (3/4) Epoch 8, batch 650, loss[loss=0.2601, simple_loss=0.3274, pruned_loss=0.09645, over 7015.00 frames.], tot_loss[loss=0.2452, simple_loss=0.3123, pruned_loss=0.08906, over 1361575.18 frames.], batch size: 26, lr: 1.82e-03 +2022-05-27 15:36:03,183 INFO [train.py:823] (3/4) Epoch 8, batch 700, loss[loss=0.1998, simple_loss=0.2809, pruned_loss=0.05937, over 7290.00 frames.], tot_loss[loss=0.2448, simple_loss=0.3122, pruned_loss=0.08868, over 1379338.92 frames.], batch size: 19, lr: 1.82e-03 +2022-05-27 15:36:42,117 INFO [train.py:823] (3/4) Epoch 8, batch 750, loss[loss=0.2182, simple_loss=0.2893, pruned_loss=0.07356, over 7101.00 frames.], tot_loss[loss=0.2444, simple_loss=0.3117, pruned_loss=0.08854, over 1387055.75 frames.], batch size: 18, lr: 1.81e-03 +2022-05-27 15:37:21,603 INFO [train.py:823] (3/4) Epoch 8, batch 800, loss[loss=0.2723, simple_loss=0.3309, pruned_loss=0.1068, over 4746.00 frames.], tot_loss[loss=0.2447, simple_loss=0.3118, pruned_loss=0.08877, over 1387897.07 frames.], batch size: 46, lr: 1.81e-03 +2022-05-27 15:38:00,638 INFO [train.py:823] (3/4) Epoch 8, batch 850, loss[loss=0.2462, simple_loss=0.3111, pruned_loss=0.09068, over 7188.00 frames.], tot_loss[loss=0.2446, simple_loss=0.3114, pruned_loss=0.08887, over 1390259.63 frames.], batch size: 20, lr: 1.80e-03 +2022-05-27 15:38:39,760 INFO [train.py:823] (3/4) Epoch 8, batch 900, loss[loss=0.2519, simple_loss=0.3209, pruned_loss=0.0914, over 7089.00 frames.], tot_loss[loss=0.2453, simple_loss=0.3125, pruned_loss=0.08905, over 1394437.32 frames.], batch size: 18, lr: 1.80e-03 +2022-05-27 15:39:31,049 INFO [train.py:823] (3/4) Epoch 9, batch 0, loss[loss=0.2622, simple_loss=0.3304, pruned_loss=0.09705, over 7191.00 frames.], tot_loss[loss=0.2622, simple_loss=0.3304, pruned_loss=0.09705, over 7191.00 frames.], batch size: 21, lr: 1.72e-03 +2022-05-27 15:40:10,174 INFO [train.py:823] (3/4) Epoch 9, batch 50, loss[loss=0.1987, simple_loss=0.2755, pruned_loss=0.06096, over 7387.00 frames.], tot_loss[loss=0.2312, simple_loss=0.3011, pruned_loss=0.08067, over 319728.90 frames.], batch size: 19, lr: 1.72e-03 +2022-05-27 15:40:49,192 INFO [train.py:823] (3/4) Epoch 9, batch 100, loss[loss=0.2519, simple_loss=0.3187, pruned_loss=0.09256, over 7292.00 frames.], tot_loss[loss=0.2333, simple_loss=0.3036, pruned_loss=0.08154, over 563761.31 frames.], batch size: 19, lr: 1.71e-03 +2022-05-27 15:41:28,208 INFO [train.py:823] (3/4) Epoch 9, batch 150, loss[loss=0.2422, simple_loss=0.3105, pruned_loss=0.08692, over 7097.00 frames.], tot_loss[loss=0.2352, simple_loss=0.3059, pruned_loss=0.08227, over 754201.28 frames.], batch size: 19, lr: 1.71e-03 +2022-05-27 15:42:06,945 INFO [train.py:823] (3/4) Epoch 9, batch 200, loss[loss=0.202, simple_loss=0.2902, pruned_loss=0.05697, over 7286.00 frames.], tot_loss[loss=0.2349, simple_loss=0.3063, 
pruned_loss=0.08175, over 896585.99 frames.], batch size: 20, lr: 1.71e-03 +2022-05-27 15:42:46,368 INFO [train.py:823] (3/4) Epoch 9, batch 250, loss[loss=0.223, simple_loss=0.2931, pruned_loss=0.07641, over 7181.00 frames.], tot_loss[loss=0.2337, simple_loss=0.3053, pruned_loss=0.08104, over 1012626.59 frames.], batch size: 20, lr: 1.70e-03 +2022-05-27 15:43:24,911 INFO [train.py:823] (3/4) Epoch 9, batch 300, loss[loss=0.2196, simple_loss=0.3, pruned_loss=0.06957, over 7190.00 frames.], tot_loss[loss=0.2326, simple_loss=0.3044, pruned_loss=0.08038, over 1104358.04 frames.], batch size: 18, lr: 1.70e-03 +2022-05-27 15:44:04,333 INFO [train.py:823] (3/4) Epoch 9, batch 350, loss[loss=0.2007, simple_loss=0.2656, pruned_loss=0.06794, over 7287.00 frames.], tot_loss[loss=0.2324, simple_loss=0.3043, pruned_loss=0.08021, over 1173378.74 frames.], batch size: 17, lr: 1.70e-03 +2022-05-27 15:44:43,689 INFO [train.py:823] (3/4) Epoch 9, batch 400, loss[loss=0.2292, simple_loss=0.3167, pruned_loss=0.07083, over 7305.00 frames.], tot_loss[loss=0.2314, simple_loss=0.3034, pruned_loss=0.07967, over 1229830.52 frames.], batch size: 22, lr: 1.69e-03 +2022-05-27 15:45:28,873 INFO [train.py:823] (3/4) Epoch 9, batch 450, loss[loss=0.2077, simple_loss=0.2829, pruned_loss=0.06624, over 7192.00 frames.], tot_loss[loss=0.2344, simple_loss=0.3061, pruned_loss=0.08136, over 1271631.45 frames.], batch size: 19, lr: 1.69e-03 +2022-05-27 15:46:09,008 INFO [train.py:823] (3/4) Epoch 9, batch 500, loss[loss=0.2153, simple_loss=0.3054, pruned_loss=0.06259, over 7228.00 frames.], tot_loss[loss=0.2359, simple_loss=0.3074, pruned_loss=0.08216, over 1305188.81 frames.], batch size: 24, lr: 1.68e-03 +2022-05-27 15:46:48,244 INFO [train.py:823] (3/4) Epoch 9, batch 550, loss[loss=0.2849, simple_loss=0.3331, pruned_loss=0.1183, over 7204.00 frames.], tot_loss[loss=0.2366, simple_loss=0.3077, pruned_loss=0.08277, over 1333807.51 frames.], batch size: 19, lr: 1.68e-03 +2022-05-27 15:47:28,246 INFO [train.py:823] (3/4) Epoch 9, batch 600, loss[loss=0.2221, simple_loss=0.2869, pruned_loss=0.07862, over 7156.00 frames.], tot_loss[loss=0.2352, simple_loss=0.3064, pruned_loss=0.08201, over 1354101.53 frames.], batch size: 17, lr: 1.68e-03 +2022-05-27 15:48:07,787 INFO [train.py:823] (3/4) Epoch 9, batch 650, loss[loss=0.2488, simple_loss=0.3224, pruned_loss=0.08763, over 6961.00 frames.], tot_loss[loss=0.2347, simple_loss=0.306, pruned_loss=0.08172, over 1367118.30 frames.], batch size: 29, lr: 1.67e-03 +2022-05-27 15:48:46,366 INFO [train.py:823] (3/4) Epoch 9, batch 700, loss[loss=0.2347, simple_loss=0.3128, pruned_loss=0.07828, over 7294.00 frames.], tot_loss[loss=0.2345, simple_loss=0.3059, pruned_loss=0.08157, over 1375125.87 frames.], batch size: 22, lr: 1.67e-03 +2022-05-27 15:49:25,436 INFO [train.py:823] (3/4) Epoch 9, batch 750, loss[loss=0.1989, simple_loss=0.2631, pruned_loss=0.0673, over 7183.00 frames.], tot_loss[loss=0.2356, simple_loss=0.307, pruned_loss=0.0821, over 1385694.14 frames.], batch size: 18, lr: 1.67e-03 +2022-05-27 15:50:03,878 INFO [train.py:823] (3/4) Epoch 9, batch 800, loss[loss=0.2595, simple_loss=0.312, pruned_loss=0.1035, over 7096.00 frames.], tot_loss[loss=0.235, simple_loss=0.3068, pruned_loss=0.08162, over 1387983.55 frames.], batch size: 19, lr: 1.66e-03 +2022-05-27 15:50:43,436 INFO [train.py:823] (3/4) Epoch 9, batch 850, loss[loss=0.1687, simple_loss=0.2429, pruned_loss=0.0473, over 7286.00 frames.], tot_loss[loss=0.2339, simple_loss=0.3062, pruned_loss=0.08087, over 1397788.74 
frames.], batch size: 16, lr: 1.66e-03 +2022-05-27 15:51:23,344 INFO [train.py:823] (3/4) Epoch 9, batch 900, loss[loss=0.2263, simple_loss=0.2834, pruned_loss=0.08458, over 7236.00 frames.], tot_loss[loss=0.2352, simple_loss=0.3071, pruned_loss=0.08164, over 1399798.41 frames.], batch size: 16, lr: 1.65e-03 +2022-05-27 15:52:14,291 INFO [train.py:823] (3/4) Epoch 10, batch 0, loss[loss=0.233, simple_loss=0.3059, pruned_loss=0.08002, over 7106.00 frames.], tot_loss[loss=0.233, simple_loss=0.3059, pruned_loss=0.08002, over 7106.00 frames.], batch size: 20, lr: 1.59e-03 +2022-05-27 15:52:52,887 INFO [train.py:823] (3/4) Epoch 10, batch 50, loss[loss=0.1714, simple_loss=0.2478, pruned_loss=0.04754, over 7028.00 frames.], tot_loss[loss=0.2257, simple_loss=0.3002, pruned_loss=0.07562, over 319356.87 frames.], batch size: 17, lr: 1.58e-03 +2022-05-27 15:53:32,178 INFO [train.py:823] (3/4) Epoch 10, batch 100, loss[loss=0.1998, simple_loss=0.2766, pruned_loss=0.06148, over 7373.00 frames.], tot_loss[loss=0.2224, simple_loss=0.2981, pruned_loss=0.07334, over 559712.63 frames.], batch size: 20, lr: 1.58e-03 +2022-05-27 15:54:10,839 INFO [train.py:823] (3/4) Epoch 10, batch 150, loss[loss=0.2237, simple_loss=0.303, pruned_loss=0.07218, over 7282.00 frames.], tot_loss[loss=0.2259, simple_loss=0.3019, pruned_loss=0.07497, over 749634.14 frames.], batch size: 20, lr: 1.58e-03 +2022-05-27 15:54:50,513 INFO [train.py:823] (3/4) Epoch 10, batch 200, loss[loss=0.2202, simple_loss=0.3079, pruned_loss=0.06629, over 7298.00 frames.], tot_loss[loss=0.223, simple_loss=0.2992, pruned_loss=0.0734, over 901205.95 frames.], batch size: 21, lr: 1.57e-03 +2022-05-27 15:55:29,318 INFO [train.py:823] (3/4) Epoch 10, batch 250, loss[loss=0.241, simple_loss=0.3037, pruned_loss=0.08917, over 7381.00 frames.], tot_loss[loss=0.2236, simple_loss=0.2996, pruned_loss=0.07379, over 1017309.78 frames.], batch size: 21, lr: 1.57e-03 +2022-05-27 15:56:08,347 INFO [train.py:823] (3/4) Epoch 10, batch 300, loss[loss=0.239, simple_loss=0.3076, pruned_loss=0.08523, over 7014.00 frames.], tot_loss[loss=0.2235, simple_loss=0.2995, pruned_loss=0.07374, over 1108771.41 frames.], batch size: 26, lr: 1.57e-03 +2022-05-27 15:56:47,641 INFO [train.py:823] (3/4) Epoch 10, batch 350, loss[loss=0.1879, simple_loss=0.2636, pruned_loss=0.05606, over 6776.00 frames.], tot_loss[loss=0.2233, simple_loss=0.2989, pruned_loss=0.07385, over 1175441.32 frames.], batch size: 15, lr: 1.56e-03 +2022-05-27 15:57:26,952 INFO [train.py:823] (3/4) Epoch 10, batch 400, loss[loss=0.2511, simple_loss=0.3175, pruned_loss=0.09237, over 7089.00 frames.], tot_loss[loss=0.2259, simple_loss=0.3011, pruned_loss=0.0753, over 1224897.08 frames.], batch size: 19, lr: 1.56e-03 +2022-05-27 15:58:06,028 INFO [train.py:823] (3/4) Epoch 10, batch 450, loss[loss=0.217, simple_loss=0.2938, pruned_loss=0.07013, over 7274.00 frames.], tot_loss[loss=0.2267, simple_loss=0.3014, pruned_loss=0.07601, over 1266077.79 frames.], batch size: 20, lr: 1.56e-03 +2022-05-27 15:58:45,245 INFO [train.py:823] (3/4) Epoch 10, batch 500, loss[loss=0.2015, simple_loss=0.278, pruned_loss=0.06251, over 7280.00 frames.], tot_loss[loss=0.2262, simple_loss=0.301, pruned_loss=0.0757, over 1298862.51 frames.], batch size: 20, lr: 1.55e-03 +2022-05-27 15:59:24,009 INFO [train.py:823] (3/4) Epoch 10, batch 550, loss[loss=0.2094, simple_loss=0.2846, pruned_loss=0.06712, over 7112.00 frames.], tot_loss[loss=0.2259, simple_loss=0.3008, pruned_loss=0.07552, over 1328526.92 frames.], batch size: 18, lr: 1.55e-03 
+2022-05-27 16:00:03,462 INFO [train.py:823] (3/4) Epoch 10, batch 600, loss[loss=0.1937, simple_loss=0.2771, pruned_loss=0.05514, over 7301.00 frames.], tot_loss[loss=0.2272, simple_loss=0.3017, pruned_loss=0.07638, over 1352695.22 frames.], batch size: 19, lr: 1.55e-03 +2022-05-27 16:00:42,545 INFO [train.py:823] (3/4) Epoch 10, batch 650, loss[loss=0.2249, simple_loss=0.3037, pruned_loss=0.07305, over 7184.00 frames.], tot_loss[loss=0.2251, simple_loss=0.2999, pruned_loss=0.0751, over 1370493.85 frames.], batch size: 21, lr: 1.54e-03 +2022-05-27 16:01:22,303 INFO [train.py:823] (3/4) Epoch 10, batch 700, loss[loss=0.2487, simple_loss=0.3021, pruned_loss=0.09766, over 7021.00 frames.], tot_loss[loss=0.2237, simple_loss=0.2991, pruned_loss=0.07412, over 1384990.70 frames.], batch size: 16, lr: 1.54e-03 +2022-05-27 16:02:01,177 INFO [train.py:823] (3/4) Epoch 10, batch 750, loss[loss=0.2122, simple_loss=0.2793, pruned_loss=0.07258, over 7185.00 frames.], tot_loss[loss=0.2241, simple_loss=0.2997, pruned_loss=0.0743, over 1393061.40 frames.], batch size: 18, lr: 1.54e-03 +2022-05-27 16:02:40,234 INFO [train.py:823] (3/4) Epoch 10, batch 800, loss[loss=0.2348, simple_loss=0.3034, pruned_loss=0.08306, over 7220.00 frames.], tot_loss[loss=0.224, simple_loss=0.2999, pruned_loss=0.07409, over 1399304.47 frames.], batch size: 25, lr: 1.53e-03 +2022-05-27 16:03:19,625 INFO [train.py:823] (3/4) Epoch 10, batch 850, loss[loss=0.2065, simple_loss=0.2939, pruned_loss=0.05955, over 7157.00 frames.], tot_loss[loss=0.2225, simple_loss=0.2989, pruned_loss=0.07307, over 1405275.05 frames.], batch size: 22, lr: 1.53e-03 +2022-05-27 16:03:59,225 INFO [train.py:823] (3/4) Epoch 10, batch 900, loss[loss=0.1887, simple_loss=0.2656, pruned_loss=0.05585, over 7268.00 frames.], tot_loss[loss=0.2223, simple_loss=0.2986, pruned_loss=0.07305, over 1405714.79 frames.], batch size: 16, lr: 1.53e-03 +2022-05-27 16:04:53,507 INFO [train.py:823] (3/4) Epoch 11, batch 0, loss[loss=0.2057, simple_loss=0.2837, pruned_loss=0.06392, over 7086.00 frames.], tot_loss[loss=0.2057, simple_loss=0.2837, pruned_loss=0.06392, over 7086.00 frames.], batch size: 19, lr: 1.47e-03 +2022-05-27 16:05:32,607 INFO [train.py:823] (3/4) Epoch 11, batch 50, loss[loss=0.2021, simple_loss=0.2848, pruned_loss=0.05972, over 6264.00 frames.], tot_loss[loss=0.2227, simple_loss=0.2999, pruned_loss=0.07279, over 323072.97 frames.], batch size: 34, lr: 1.47e-03 +2022-05-27 16:06:11,612 INFO [train.py:823] (3/4) Epoch 11, batch 100, loss[loss=0.1743, simple_loss=0.2511, pruned_loss=0.04877, over 7157.00 frames.], tot_loss[loss=0.2175, simple_loss=0.2948, pruned_loss=0.07016, over 569847.79 frames.], batch size: 17, lr: 1.46e-03 +2022-05-27 16:06:50,821 INFO [train.py:823] (3/4) Epoch 11, batch 150, loss[loss=0.2243, simple_loss=0.3099, pruned_loss=0.06936, over 7214.00 frames.], tot_loss[loss=0.2167, simple_loss=0.2941, pruned_loss=0.06963, over 761191.95 frames.], batch size: 24, lr: 1.46e-03 +2022-05-27 16:07:29,185 INFO [train.py:823] (3/4) Epoch 11, batch 200, loss[loss=0.2213, simple_loss=0.2975, pruned_loss=0.07251, over 7098.00 frames.], tot_loss[loss=0.218, simple_loss=0.2961, pruned_loss=0.06996, over 900908.74 frames.], batch size: 19, lr: 1.46e-03 +2022-05-27 16:08:08,813 INFO [train.py:823] (3/4) Epoch 11, batch 250, loss[loss=0.2113, simple_loss=0.2892, pruned_loss=0.06669, over 7092.00 frames.], tot_loss[loss=0.2168, simple_loss=0.2946, pruned_loss=0.06948, over 1014003.68 frames.], batch size: 18, lr: 1.45e-03 +2022-05-27 16:08:50,577 INFO 
[train.py:823] (3/4) Epoch 11, batch 300, loss[loss=0.2078, simple_loss=0.3026, pruned_loss=0.05645, over 7196.00 frames.], tot_loss[loss=0.216, simple_loss=0.294, pruned_loss=0.06899, over 1105000.59 frames.], batch size: 25, lr: 1.45e-03 +2022-05-27 16:09:29,807 INFO [train.py:823] (3/4) Epoch 11, batch 350, loss[loss=0.2167, simple_loss=0.2937, pruned_loss=0.06985, over 7224.00 frames.], tot_loss[loss=0.2172, simple_loss=0.2948, pruned_loss=0.06977, over 1176646.16 frames.], batch size: 25, lr: 1.45e-03 +2022-05-27 16:10:08,817 INFO [train.py:823] (3/4) Epoch 11, batch 400, loss[loss=0.1976, simple_loss=0.2923, pruned_loss=0.0514, over 7090.00 frames.], tot_loss[loss=0.2174, simple_loss=0.2951, pruned_loss=0.06987, over 1231138.44 frames.], batch size: 19, lr: 1.44e-03 +2022-05-27 16:10:48,027 INFO [train.py:823] (3/4) Epoch 11, batch 450, loss[loss=0.2276, simple_loss=0.3132, pruned_loss=0.07105, over 7282.00 frames.], tot_loss[loss=0.2178, simple_loss=0.2954, pruned_loss=0.07008, over 1269701.89 frames.], batch size: 22, lr: 1.44e-03 +2022-05-27 16:11:28,185 INFO [train.py:823] (3/4) Epoch 11, batch 500, loss[loss=0.2126, simple_loss=0.2935, pruned_loss=0.06587, over 6564.00 frames.], tot_loss[loss=0.2183, simple_loss=0.2962, pruned_loss=0.07025, over 1304396.67 frames.], batch size: 34, lr: 1.44e-03 +2022-05-27 16:12:07,659 INFO [train.py:823] (3/4) Epoch 11, batch 550, loss[loss=0.1925, simple_loss=0.2813, pruned_loss=0.05186, over 7432.00 frames.], tot_loss[loss=0.2187, simple_loss=0.2964, pruned_loss=0.07048, over 1332095.51 frames.], batch size: 22, lr: 1.44e-03 +2022-05-27 16:12:46,628 INFO [train.py:823] (3/4) Epoch 11, batch 600, loss[loss=0.1965, simple_loss=0.2752, pruned_loss=0.05892, over 7385.00 frames.], tot_loss[loss=0.2186, simple_loss=0.2963, pruned_loss=0.07045, over 1350770.78 frames.], batch size: 19, lr: 1.43e-03 +2022-05-27 16:13:26,120 INFO [train.py:823] (3/4) Epoch 11, batch 650, loss[loss=0.2039, simple_loss=0.2745, pruned_loss=0.06662, over 7310.00 frames.], tot_loss[loss=0.218, simple_loss=0.2956, pruned_loss=0.07019, over 1368899.19 frames.], batch size: 18, lr: 1.43e-03 +2022-05-27 16:14:04,502 INFO [train.py:823] (3/4) Epoch 11, batch 700, loss[loss=0.2628, simple_loss=0.313, pruned_loss=0.1063, over 7141.00 frames.], tot_loss[loss=0.2186, simple_loss=0.296, pruned_loss=0.07058, over 1382193.97 frames.], batch size: 17, lr: 1.43e-03 +2022-05-27 16:14:45,072 INFO [train.py:823] (3/4) Epoch 11, batch 750, loss[loss=0.188, simple_loss=0.2617, pruned_loss=0.05718, over 7309.00 frames.], tot_loss[loss=0.2179, simple_loss=0.2947, pruned_loss=0.0706, over 1390504.11 frames.], batch size: 17, lr: 1.42e-03 +2022-05-27 16:15:23,554 INFO [train.py:823] (3/4) Epoch 11, batch 800, loss[loss=0.175, simple_loss=0.2659, pruned_loss=0.04207, over 7199.00 frames.], tot_loss[loss=0.2177, simple_loss=0.2949, pruned_loss=0.07031, over 1395592.17 frames.], batch size: 19, lr: 1.42e-03 +2022-05-27 16:16:03,301 INFO [train.py:823] (3/4) Epoch 11, batch 850, loss[loss=0.2051, simple_loss=0.2849, pruned_loss=0.06264, over 7106.00 frames.], tot_loss[loss=0.2176, simple_loss=0.2947, pruned_loss=0.07027, over 1399219.98 frames.], batch size: 20, lr: 1.42e-03 +2022-05-27 16:16:42,234 INFO [train.py:823] (3/4) Epoch 11, batch 900, loss[loss=0.1958, simple_loss=0.2621, pruned_loss=0.06471, over 6789.00 frames.], tot_loss[loss=0.218, simple_loss=0.2957, pruned_loss=0.07013, over 1398853.36 frames.], batch size: 15, lr: 1.42e-03 +2022-05-27 16:17:33,002 INFO [train.py:823] (3/4) Epoch 
12, batch 0, loss[loss=0.1967, simple_loss=0.2718, pruned_loss=0.06079, over 7297.00 frames.], tot_loss[loss=0.1967, simple_loss=0.2718, pruned_loss=0.06079, over 7297.00 frames.], batch size: 17, lr: 1.36e-03 +2022-05-27 16:18:12,434 INFO [train.py:823] (3/4) Epoch 12, batch 50, loss[loss=0.2344, simple_loss=0.3104, pruned_loss=0.07923, over 7250.00 frames.], tot_loss[loss=0.2134, simple_loss=0.2915, pruned_loss=0.06766, over 317595.63 frames.], batch size: 24, lr: 1.36e-03 +2022-05-27 16:18:51,721 INFO [train.py:823] (3/4) Epoch 12, batch 100, loss[loss=0.2466, simple_loss=0.3126, pruned_loss=0.09035, over 7151.00 frames.], tot_loss[loss=0.2151, simple_loss=0.2938, pruned_loss=0.06821, over 561629.40 frames.], batch size: 23, lr: 1.36e-03 +2022-05-27 16:19:30,795 INFO [train.py:823] (3/4) Epoch 12, batch 150, loss[loss=0.2261, simple_loss=0.2993, pruned_loss=0.07646, over 7281.00 frames.], tot_loss[loss=0.2136, simple_loss=0.2928, pruned_loss=0.06715, over 752145.04 frames.], batch size: 20, lr: 1.36e-03 +2022-05-27 16:20:10,261 INFO [train.py:823] (3/4) Epoch 12, batch 200, loss[loss=0.21, simple_loss=0.2891, pruned_loss=0.06545, over 6788.00 frames.], tot_loss[loss=0.2125, simple_loss=0.2924, pruned_loss=0.06635, over 898324.38 frames.], batch size: 15, lr: 1.35e-03 +2022-05-27 16:20:49,273 INFO [train.py:823] (3/4) Epoch 12, batch 250, loss[loss=0.2411, simple_loss=0.3206, pruned_loss=0.08084, over 7006.00 frames.], tot_loss[loss=0.2128, simple_loss=0.2926, pruned_loss=0.06651, over 1015413.95 frames.], batch size: 26, lr: 1.35e-03 +2022-05-27 16:21:28,569 INFO [train.py:823] (3/4) Epoch 12, batch 300, loss[loss=0.231, simple_loss=0.3096, pruned_loss=0.07615, over 7192.00 frames.], tot_loss[loss=0.2144, simple_loss=0.2937, pruned_loss=0.0676, over 1101930.22 frames.], batch size: 19, lr: 1.35e-03 +2022-05-27 16:22:07,738 INFO [train.py:823] (3/4) Epoch 12, batch 350, loss[loss=0.205, simple_loss=0.3009, pruned_loss=0.05453, over 7327.00 frames.], tot_loss[loss=0.2137, simple_loss=0.2935, pruned_loss=0.06695, over 1176153.07 frames.], batch size: 23, lr: 1.35e-03 +2022-05-27 16:22:46,806 INFO [train.py:823] (3/4) Epoch 12, batch 400, loss[loss=0.211, simple_loss=0.2967, pruned_loss=0.06261, over 7077.00 frames.], tot_loss[loss=0.2133, simple_loss=0.2931, pruned_loss=0.06676, over 1230958.68 frames.], batch size: 29, lr: 1.34e-03 +2022-05-27 16:23:26,141 INFO [train.py:823] (3/4) Epoch 12, batch 450, loss[loss=0.2597, simple_loss=0.3289, pruned_loss=0.09519, over 7375.00 frames.], tot_loss[loss=0.2136, simple_loss=0.2936, pruned_loss=0.06685, over 1272894.33 frames.], batch size: 20, lr: 1.34e-03 +2022-05-27 16:24:05,952 INFO [train.py:823] (3/4) Epoch 12, batch 500, loss[loss=0.1793, simple_loss=0.2667, pruned_loss=0.04592, over 7275.00 frames.], tot_loss[loss=0.2114, simple_loss=0.2913, pruned_loss=0.06575, over 1310495.71 frames.], batch size: 20, lr: 1.34e-03 +2022-05-27 16:24:44,917 INFO [train.py:823] (3/4) Epoch 12, batch 550, loss[loss=0.2282, simple_loss=0.2911, pruned_loss=0.0827, over 7039.00 frames.], tot_loss[loss=0.2129, simple_loss=0.2926, pruned_loss=0.06667, over 1338124.19 frames.], batch size: 17, lr: 1.34e-03 +2022-05-27 16:25:24,239 INFO [train.py:823] (3/4) Epoch 12, batch 600, loss[loss=0.1957, simple_loss=0.2672, pruned_loss=0.06208, over 7220.00 frames.], tot_loss[loss=0.2128, simple_loss=0.2924, pruned_loss=0.06662, over 1358490.85 frames.], batch size: 16, lr: 1.33e-03 +2022-05-27 16:26:03,083 INFO [train.py:823] (3/4) Epoch 12, batch 650, loss[loss=0.2227, 
simple_loss=0.3053, pruned_loss=0.07002, over 7289.00 frames.], tot_loss[loss=0.2126, simple_loss=0.2923, pruned_loss=0.06648, over 1371289.02 frames.], batch size: 21, lr: 1.33e-03 +2022-05-27 16:26:41,934 INFO [train.py:823] (3/4) Epoch 12, batch 700, loss[loss=0.2418, simple_loss=0.3114, pruned_loss=0.08608, over 7270.00 frames.], tot_loss[loss=0.2129, simple_loss=0.2929, pruned_loss=0.06651, over 1381761.79 frames.], batch size: 20, lr: 1.33e-03 +2022-05-27 16:27:21,123 INFO [train.py:823] (3/4) Epoch 12, batch 750, loss[loss=0.2334, simple_loss=0.3171, pruned_loss=0.07483, over 7289.00 frames.], tot_loss[loss=0.2126, simple_loss=0.2925, pruned_loss=0.06629, over 1387591.69 frames.], batch size: 22, lr: 1.33e-03 +2022-05-27 16:28:00,226 INFO [train.py:823] (3/4) Epoch 12, batch 800, loss[loss=0.2143, simple_loss=0.3005, pruned_loss=0.06409, over 7301.00 frames.], tot_loss[loss=0.2115, simple_loss=0.2918, pruned_loss=0.06561, over 1394398.06 frames.], batch size: 22, lr: 1.32e-03 +2022-05-27 16:28:38,849 INFO [train.py:823] (3/4) Epoch 12, batch 850, loss[loss=0.2172, simple_loss=0.2926, pruned_loss=0.07093, over 7194.00 frames.], tot_loss[loss=0.212, simple_loss=0.2924, pruned_loss=0.06587, over 1399940.00 frames.], batch size: 18, lr: 1.32e-03 +2022-05-27 16:29:17,849 INFO [train.py:823] (3/4) Epoch 12, batch 900, loss[loss=0.2194, simple_loss=0.2875, pruned_loss=0.07566, over 7101.00 frames.], tot_loss[loss=0.2116, simple_loss=0.2915, pruned_loss=0.06583, over 1395899.94 frames.], batch size: 19, lr: 1.32e-03 +2022-05-27 16:30:08,484 INFO [train.py:823] (3/4) Epoch 13, batch 0, loss[loss=0.2306, simple_loss=0.3106, pruned_loss=0.07526, over 7168.00 frames.], tot_loss[loss=0.2306, simple_loss=0.3106, pruned_loss=0.07526, over 7168.00 frames.], batch size: 22, lr: 1.27e-03 +2022-05-27 16:30:48,287 INFO [train.py:823] (3/4) Epoch 13, batch 50, loss[loss=0.1744, simple_loss=0.2613, pruned_loss=0.04373, over 7290.00 frames.], tot_loss[loss=0.2092, simple_loss=0.2869, pruned_loss=0.0658, over 318199.23 frames.], batch size: 19, lr: 1.27e-03 +2022-05-27 16:31:28,650 INFO [train.py:823] (3/4) Epoch 13, batch 100, loss[loss=0.1943, simple_loss=0.2685, pruned_loss=0.06003, over 7300.00 frames.], tot_loss[loss=0.209, simple_loss=0.2885, pruned_loss=0.06474, over 561439.47 frames.], batch size: 18, lr: 1.27e-03 +2022-05-27 16:32:11,943 INFO [train.py:823] (3/4) Epoch 13, batch 150, loss[loss=0.1962, simple_loss=0.2669, pruned_loss=0.06272, over 7397.00 frames.], tot_loss[loss=0.2088, simple_loss=0.289, pruned_loss=0.06426, over 751425.58 frames.], batch size: 19, lr: 1.26e-03 +2022-05-27 16:32:57,616 INFO [train.py:823] (3/4) Epoch 13, batch 200, loss[loss=0.2094, simple_loss=0.2731, pruned_loss=0.07282, over 7423.00 frames.], tot_loss[loss=0.2077, simple_loss=0.2883, pruned_loss=0.06359, over 903217.42 frames.], batch size: 18, lr: 1.26e-03 +2022-05-27 16:33:37,111 INFO [train.py:823] (3/4) Epoch 13, batch 250, loss[loss=0.2355, simple_loss=0.3148, pruned_loss=0.0781, over 7166.00 frames.], tot_loss[loss=0.2072, simple_loss=0.2879, pruned_loss=0.06322, over 1016684.36 frames.], batch size: 22, lr: 1.26e-03 +2022-05-27 16:34:18,249 INFO [train.py:823] (3/4) Epoch 13, batch 300, loss[loss=0.1818, simple_loss=0.2572, pruned_loss=0.05323, over 7284.00 frames.], tot_loss[loss=0.2067, simple_loss=0.287, pruned_loss=0.06318, over 1110664.92 frames.], batch size: 17, lr: 1.26e-03 +2022-05-27 16:34:57,115 INFO [train.py:823] (3/4) Epoch 13, batch 350, loss[loss=0.1983, simple_loss=0.2869, 
pruned_loss=0.05491, over 6526.00 frames.], tot_loss[loss=0.2072, simple_loss=0.2879, pruned_loss=0.06321, over 1176929.54 frames.], batch size: 34, lr: 1.26e-03 +2022-05-27 16:35:36,363 INFO [train.py:823] (3/4) Epoch 13, batch 400, loss[loss=0.1998, simple_loss=0.2864, pruned_loss=0.05661, over 6968.00 frames.], tot_loss[loss=0.2067, simple_loss=0.2876, pruned_loss=0.0629, over 1230610.96 frames.], batch size: 26, lr: 1.25e-03 +2022-05-27 16:36:15,587 INFO [train.py:823] (3/4) Epoch 13, batch 450, loss[loss=0.2033, simple_loss=0.2891, pruned_loss=0.05875, over 6914.00 frames.], tot_loss[loss=0.2065, simple_loss=0.2873, pruned_loss=0.06284, over 1268243.26 frames.], batch size: 29, lr: 1.25e-03 +2022-05-27 16:36:55,153 INFO [train.py:823] (3/4) Epoch 13, batch 500, loss[loss=0.1723, simple_loss=0.2681, pruned_loss=0.03827, over 6868.00 frames.], tot_loss[loss=0.2057, simple_loss=0.2866, pruned_loss=0.06237, over 1301197.40 frames.], batch size: 29, lr: 1.25e-03 +2022-05-27 16:37:34,280 INFO [train.py:823] (3/4) Epoch 13, batch 550, loss[loss=0.1896, simple_loss=0.2783, pruned_loss=0.05046, over 7285.00 frames.], tot_loss[loss=0.2065, simple_loss=0.2868, pruned_loss=0.06313, over 1322786.18 frames.], batch size: 19, lr: 1.25e-03 +2022-05-27 16:38:13,504 INFO [train.py:823] (3/4) Epoch 13, batch 600, loss[loss=0.2155, simple_loss=0.3014, pruned_loss=0.0648, over 7275.00 frames.], tot_loss[loss=0.2074, simple_loss=0.2881, pruned_loss=0.0633, over 1344920.50 frames.], batch size: 20, lr: 1.24e-03 +2022-05-27 16:38:53,785 INFO [train.py:823] (3/4) Epoch 13, batch 650, loss[loss=0.1804, simple_loss=0.2711, pruned_loss=0.04487, over 7202.00 frames.], tot_loss[loss=0.2065, simple_loss=0.2876, pruned_loss=0.06275, over 1361114.49 frames.], batch size: 19, lr: 1.24e-03 +2022-05-27 16:39:33,152 INFO [train.py:823] (3/4) Epoch 13, batch 700, loss[loss=0.1934, simple_loss=0.2656, pruned_loss=0.06055, over 7035.00 frames.], tot_loss[loss=0.2063, simple_loss=0.2876, pruned_loss=0.0625, over 1370797.89 frames.], batch size: 17, lr: 1.24e-03 +2022-05-27 16:40:12,578 INFO [train.py:823] (3/4) Epoch 13, batch 750, loss[loss=0.2143, simple_loss=0.2927, pruned_loss=0.06794, over 6936.00 frames.], tot_loss[loss=0.2053, simple_loss=0.2877, pruned_loss=0.06146, over 1378372.50 frames.], batch size: 29, lr: 1.24e-03 +2022-05-27 16:40:51,208 INFO [train.py:823] (3/4) Epoch 13, batch 800, loss[loss=0.2289, simple_loss=0.3078, pruned_loss=0.07499, over 7165.00 frames.], tot_loss[loss=0.2063, simple_loss=0.2881, pruned_loss=0.06223, over 1385586.90 frames.], batch size: 23, lr: 1.24e-03 +2022-05-27 16:41:30,513 INFO [train.py:823] (3/4) Epoch 13, batch 850, loss[loss=0.2216, simple_loss=0.3026, pruned_loss=0.07031, over 7278.00 frames.], tot_loss[loss=0.2064, simple_loss=0.288, pruned_loss=0.06236, over 1395866.36 frames.], batch size: 20, lr: 1.23e-03 +2022-05-27 16:42:09,490 INFO [train.py:823] (3/4) Epoch 13, batch 900, loss[loss=0.1693, simple_loss=0.2659, pruned_loss=0.03634, over 7298.00 frames.], tot_loss[loss=0.2058, simple_loss=0.288, pruned_loss=0.06183, over 1395548.05 frames.], batch size: 19, lr: 1.23e-03 +2022-05-27 16:42:48,664 INFO [train.py:823] (3/4) Epoch 13, batch 950, loss[loss=0.2246, simple_loss=0.287, pruned_loss=0.08105, over 7021.00 frames.], tot_loss[loss=0.2059, simple_loss=0.288, pruned_loss=0.06185, over 1395243.64 frames.], batch size: 16, lr: 1.23e-03 +2022-05-27 16:43:01,795 INFO [train.py:823] (3/4) Epoch 14, batch 0, loss[loss=0.209, simple_loss=0.2953, pruned_loss=0.06131, over 
7294.00 frames.], tot_loss[loss=0.209, simple_loss=0.2953, pruned_loss=0.06131, over 7294.00 frames.], batch size: 22, lr: 1.19e-03 +2022-05-27 16:43:41,493 INFO [train.py:823] (3/4) Epoch 14, batch 50, loss[loss=0.2307, simple_loss=0.317, pruned_loss=0.07215, over 7216.00 frames.], tot_loss[loss=0.2003, simple_loss=0.2836, pruned_loss=0.05844, over 324572.54 frames.], batch size: 25, lr: 1.19e-03 +2022-05-27 16:44:20,910 INFO [train.py:823] (3/4) Epoch 14, batch 100, loss[loss=0.1795, simple_loss=0.2741, pruned_loss=0.04249, over 7253.00 frames.], tot_loss[loss=0.2017, simple_loss=0.2849, pruned_loss=0.05919, over 570366.38 frames.], batch size: 24, lr: 1.19e-03 +2022-05-27 16:44:59,977 INFO [train.py:823] (3/4) Epoch 14, batch 150, loss[loss=0.199, simple_loss=0.2927, pruned_loss=0.05259, over 7279.00 frames.], tot_loss[loss=0.2021, simple_loss=0.2855, pruned_loss=0.05936, over 754716.59 frames.], batch size: 21, lr: 1.18e-03 +2022-05-27 16:45:39,652 INFO [train.py:823] (3/4) Epoch 14, batch 200, loss[loss=0.2018, simple_loss=0.3042, pruned_loss=0.04974, over 7370.00 frames.], tot_loss[loss=0.2033, simple_loss=0.2867, pruned_loss=0.05992, over 901183.52 frames.], batch size: 21, lr: 1.18e-03 +2022-05-27 16:46:18,488 INFO [train.py:823] (3/4) Epoch 14, batch 250, loss[loss=0.1975, simple_loss=0.2688, pruned_loss=0.06313, over 7298.00 frames.], tot_loss[loss=0.2004, simple_loss=0.2837, pruned_loss=0.05861, over 1019225.37 frames.], batch size: 19, lr: 1.18e-03 +2022-05-27 16:46:57,749 INFO [train.py:823] (3/4) Epoch 14, batch 300, loss[loss=0.2549, simple_loss=0.3265, pruned_loss=0.09162, over 6273.00 frames.], tot_loss[loss=0.2026, simple_loss=0.2852, pruned_loss=0.06, over 1099203.83 frames.], batch size: 34, lr: 1.18e-03 +2022-05-27 16:47:37,017 INFO [train.py:823] (3/4) Epoch 14, batch 350, loss[loss=0.1484, simple_loss=0.2401, pruned_loss=0.02832, over 7290.00 frames.], tot_loss[loss=0.2041, simple_loss=0.2869, pruned_loss=0.06062, over 1176006.43 frames.], batch size: 19, lr: 1.18e-03 +2022-05-27 16:48:16,002 INFO [train.py:823] (3/4) Epoch 14, batch 400, loss[loss=0.238, simple_loss=0.3094, pruned_loss=0.08329, over 7289.00 frames.], tot_loss[loss=0.2038, simple_loss=0.2866, pruned_loss=0.06049, over 1231445.86 frames.], batch size: 19, lr: 1.17e-03 +2022-05-27 16:48:54,599 INFO [train.py:823] (3/4) Epoch 14, batch 450, loss[loss=0.2119, simple_loss=0.2759, pruned_loss=0.07392, over 7090.00 frames.], tot_loss[loss=0.2039, simple_loss=0.2866, pruned_loss=0.06061, over 1269770.56 frames.], batch size: 18, lr: 1.17e-03 +2022-05-27 16:49:33,688 INFO [train.py:823] (3/4) Epoch 14, batch 500, loss[loss=0.1891, simple_loss=0.279, pruned_loss=0.04959, over 7180.00 frames.], tot_loss[loss=0.2025, simple_loss=0.2854, pruned_loss=0.05981, over 1304416.29 frames.], batch size: 21, lr: 1.17e-03 +2022-05-27 16:50:12,834 INFO [train.py:823] (3/4) Epoch 14, batch 550, loss[loss=0.2083, simple_loss=0.285, pruned_loss=0.0658, over 7227.00 frames.], tot_loss[loss=0.2015, simple_loss=0.2844, pruned_loss=0.05923, over 1334211.99 frames.], batch size: 25, lr: 1.17e-03 +2022-05-27 16:50:52,451 INFO [train.py:823] (3/4) Epoch 14, batch 600, loss[loss=0.2089, simple_loss=0.283, pruned_loss=0.06741, over 7390.00 frames.], tot_loss[loss=0.1994, simple_loss=0.2818, pruned_loss=0.05849, over 1355015.55 frames.], batch size: 19, lr: 1.17e-03 +2022-05-27 16:51:31,550 INFO [train.py:823] (3/4) Epoch 14, batch 650, loss[loss=0.1517, simple_loss=0.2327, pruned_loss=0.03541, over 7285.00 frames.], 
tot_loss[loss=0.1991, simple_loss=0.2818, pruned_loss=0.05818, over 1369053.99 frames.], batch size: 17, lr: 1.16e-03 +2022-05-27 16:52:10,495 INFO [train.py:823] (3/4) Epoch 14, batch 700, loss[loss=0.202, simple_loss=0.2932, pruned_loss=0.05542, over 7279.00 frames.], tot_loss[loss=0.1993, simple_loss=0.2822, pruned_loss=0.0582, over 1377623.78 frames.], batch size: 21, lr: 1.16e-03 +2022-05-27 16:52:49,098 INFO [train.py:823] (3/4) Epoch 14, batch 750, loss[loss=0.2159, simple_loss=0.2993, pruned_loss=0.06626, over 7116.00 frames.], tot_loss[loss=0.1987, simple_loss=0.2821, pruned_loss=0.05772, over 1387470.45 frames.], batch size: 20, lr: 1.16e-03 +2022-05-27 16:53:28,682 INFO [train.py:823] (3/4) Epoch 14, batch 800, loss[loss=0.203, simple_loss=0.2834, pruned_loss=0.06131, over 7199.00 frames.], tot_loss[loss=0.1999, simple_loss=0.2829, pruned_loss=0.05846, over 1393438.20 frames.], batch size: 19, lr: 1.16e-03 +2022-05-27 16:54:09,169 INFO [train.py:823] (3/4) Epoch 14, batch 850, loss[loss=0.1972, simple_loss=0.2848, pruned_loss=0.05481, over 7292.00 frames.], tot_loss[loss=0.2002, simple_loss=0.283, pruned_loss=0.05863, over 1396719.45 frames.], batch size: 20, lr: 1.16e-03 +2022-05-27 16:54:48,415 INFO [train.py:823] (3/4) Epoch 14, batch 900, loss[loss=0.1929, simple_loss=0.278, pruned_loss=0.05388, over 7016.00 frames.], tot_loss[loss=0.1998, simple_loss=0.2826, pruned_loss=0.05848, over 1400429.79 frames.], batch size: 17, lr: 1.15e-03 +2022-05-27 16:55:39,859 INFO [train.py:823] (3/4) Epoch 15, batch 0, loss[loss=0.1631, simple_loss=0.2428, pruned_loss=0.04176, over 7202.00 frames.], tot_loss[loss=0.1631, simple_loss=0.2428, pruned_loss=0.04176, over 7202.00 frames.], batch size: 19, lr: 1.12e-03 +2022-05-27 16:56:18,980 INFO [train.py:823] (3/4) Epoch 15, batch 50, loss[loss=0.2156, simple_loss=0.292, pruned_loss=0.06958, over 7192.00 frames.], tot_loss[loss=0.2028, simple_loss=0.2859, pruned_loss=0.05987, over 319303.33 frames.], batch size: 18, lr: 1.12e-03 +2022-05-27 16:56:57,715 INFO [train.py:823] (3/4) Epoch 15, batch 100, loss[loss=0.2237, simple_loss=0.3154, pruned_loss=0.06598, over 7419.00 frames.], tot_loss[loss=0.1978, simple_loss=0.2812, pruned_loss=0.0572, over 559572.48 frames.], batch size: 22, lr: 1.11e-03 +2022-05-27 16:57:38,794 INFO [train.py:823] (3/4) Epoch 15, batch 150, loss[loss=0.1865, simple_loss=0.2553, pruned_loss=0.05883, over 7309.00 frames.], tot_loss[loss=0.1964, simple_loss=0.2789, pruned_loss=0.05698, over 751715.78 frames.], batch size: 17, lr: 1.11e-03 +2022-05-27 16:58:17,786 INFO [train.py:823] (3/4) Epoch 15, batch 200, loss[loss=0.2656, simple_loss=0.3324, pruned_loss=0.0994, over 7155.00 frames.], tot_loss[loss=0.199, simple_loss=0.2811, pruned_loss=0.05847, over 897931.09 frames.], batch size: 23, lr: 1.11e-03 +2022-05-27 16:58:57,131 INFO [train.py:823] (3/4) Epoch 15, batch 250, loss[loss=0.224, simple_loss=0.308, pruned_loss=0.06999, over 6734.00 frames.], tot_loss[loss=0.198, simple_loss=0.2807, pruned_loss=0.05767, over 1014464.89 frames.], batch size: 34, lr: 1.11e-03 +2022-05-27 16:59:36,319 INFO [train.py:823] (3/4) Epoch 15, batch 300, loss[loss=0.2653, simple_loss=0.3201, pruned_loss=0.1053, over 7188.00 frames.], tot_loss[loss=0.1984, simple_loss=0.2808, pruned_loss=0.05802, over 1103522.72 frames.], batch size: 18, lr: 1.11e-03 +2022-05-27 17:00:15,479 INFO [train.py:823] (3/4) Epoch 15, batch 350, loss[loss=0.1786, simple_loss=0.2686, pruned_loss=0.04429, over 7370.00 frames.], tot_loss[loss=0.198, simple_loss=0.2812, 
pruned_loss=0.05735, over 1175995.39 frames.], batch size: 20, lr: 1.10e-03 +2022-05-27 17:00:54,492 INFO [train.py:823] (3/4) Epoch 15, batch 400, loss[loss=0.1738, simple_loss=0.2577, pruned_loss=0.04496, over 7102.00 frames.], tot_loss[loss=0.1977, simple_loss=0.281, pruned_loss=0.05722, over 1227640.51 frames.], batch size: 19, lr: 1.10e-03 +2022-05-27 17:01:34,137 INFO [train.py:823] (3/4) Epoch 15, batch 450, loss[loss=0.1887, simple_loss=0.2781, pruned_loss=0.04962, over 7229.00 frames.], tot_loss[loss=0.1957, simple_loss=0.2794, pruned_loss=0.05601, over 1275428.23 frames.], batch size: 24, lr: 1.10e-03 +2022-05-27 17:02:13,069 INFO [train.py:823] (3/4) Epoch 15, batch 500, loss[loss=0.1759, simple_loss=0.2719, pruned_loss=0.03996, over 7115.00 frames.], tot_loss[loss=0.1969, simple_loss=0.2805, pruned_loss=0.05667, over 1311213.46 frames.], batch size: 20, lr: 1.10e-03 +2022-05-27 17:02:53,422 INFO [train.py:823] (3/4) Epoch 15, batch 550, loss[loss=0.1781, simple_loss=0.2682, pruned_loss=0.04404, over 7017.00 frames.], tot_loss[loss=0.1963, simple_loss=0.2798, pruned_loss=0.05639, over 1332042.52 frames.], batch size: 17, lr: 1.10e-03 +2022-05-27 17:03:32,347 INFO [train.py:823] (3/4) Epoch 15, batch 600, loss[loss=0.1854, simple_loss=0.2602, pruned_loss=0.05524, over 7291.00 frames.], tot_loss[loss=0.1973, simple_loss=0.2808, pruned_loss=0.05693, over 1355929.07 frames.], batch size: 19, lr: 1.10e-03 +2022-05-27 17:04:11,964 INFO [train.py:823] (3/4) Epoch 15, batch 650, loss[loss=0.2425, simple_loss=0.3258, pruned_loss=0.07962, over 7160.00 frames.], tot_loss[loss=0.1972, simple_loss=0.2809, pruned_loss=0.05673, over 1367567.01 frames.], batch size: 22, lr: 1.09e-03 +2022-05-27 17:04:51,130 INFO [train.py:823] (3/4) Epoch 15, batch 700, loss[loss=0.2296, simple_loss=0.312, pruned_loss=0.07355, over 6809.00 frames.], tot_loss[loss=0.197, simple_loss=0.2808, pruned_loss=0.05661, over 1382183.92 frames.], batch size: 29, lr: 1.09e-03 +2022-05-27 17:05:30,575 INFO [train.py:823] (3/4) Epoch 15, batch 750, loss[loss=0.2151, simple_loss=0.2925, pruned_loss=0.06882, over 5383.00 frames.], tot_loss[loss=0.1966, simple_loss=0.2803, pruned_loss=0.05642, over 1386221.65 frames.], batch size: 48, lr: 1.09e-03 +2022-05-27 17:06:09,176 INFO [train.py:823] (3/4) Epoch 15, batch 800, loss[loss=0.19, simple_loss=0.2753, pruned_loss=0.05235, over 7196.00 frames.], tot_loss[loss=0.1963, simple_loss=0.2806, pruned_loss=0.05602, over 1389789.18 frames.], batch size: 19, lr: 1.09e-03 +2022-05-27 17:06:48,513 INFO [train.py:823] (3/4) Epoch 15, batch 850, loss[loss=0.2218, simple_loss=0.2969, pruned_loss=0.0734, over 7203.00 frames.], tot_loss[loss=0.197, simple_loss=0.2809, pruned_loss=0.05655, over 1395412.47 frames.], batch size: 25, lr: 1.09e-03 +2022-05-27 17:07:27,485 INFO [train.py:823] (3/4) Epoch 15, batch 900, loss[loss=0.179, simple_loss=0.2762, pruned_loss=0.04091, over 7111.00 frames.], tot_loss[loss=0.1974, simple_loss=0.2816, pruned_loss=0.05658, over 1400241.06 frames.], batch size: 19, lr: 1.09e-03 +2022-05-27 17:08:06,680 INFO [train.py:823] (3/4) Epoch 15, batch 950, loss[loss=0.1935, simple_loss=0.285, pruned_loss=0.05095, over 4544.00 frames.], tot_loss[loss=0.1975, simple_loss=0.2817, pruned_loss=0.05665, over 1380435.86 frames.], batch size: 47, lr: 1.08e-03 +2022-05-27 17:08:19,706 INFO [train.py:823] (3/4) Epoch 16, batch 0, loss[loss=0.1972, simple_loss=0.2718, pruned_loss=0.06126, over 5250.00 frames.], tot_loss[loss=0.1972, simple_loss=0.2718, pruned_loss=0.06126, over 
5250.00 frames.], batch size: 47, lr: 1.05e-03 +2022-05-27 17:08:58,776 INFO [train.py:823] (3/4) Epoch 16, batch 50, loss[loss=0.1706, simple_loss=0.2553, pruned_loss=0.04294, over 7023.00 frames.], tot_loss[loss=0.1933, simple_loss=0.2773, pruned_loss=0.05461, over 319222.04 frames.], batch size: 16, lr: 1.05e-03 +2022-05-27 17:09:38,411 INFO [train.py:823] (3/4) Epoch 16, batch 100, loss[loss=0.2031, simple_loss=0.2806, pruned_loss=0.06277, over 7201.00 frames.], tot_loss[loss=0.1924, simple_loss=0.2759, pruned_loss=0.05449, over 561236.57 frames.], batch size: 19, lr: 1.05e-03 +2022-05-27 17:10:17,983 INFO [train.py:823] (3/4) Epoch 16, batch 150, loss[loss=0.1975, simple_loss=0.2768, pruned_loss=0.05912, over 7392.00 frames.], tot_loss[loss=0.191, simple_loss=0.2752, pruned_loss=0.0534, over 756688.37 frames.], batch size: 19, lr: 1.05e-03 +2022-05-27 17:10:57,616 INFO [train.py:823] (3/4) Epoch 16, batch 200, loss[loss=0.2509, simple_loss=0.3224, pruned_loss=0.08972, over 7159.00 frames.], tot_loss[loss=0.1912, simple_loss=0.2756, pruned_loss=0.05344, over 903530.97 frames.], batch size: 23, lr: 1.05e-03 +2022-05-27 17:11:36,402 INFO [train.py:823] (3/4) Epoch 16, batch 250, loss[loss=0.2052, simple_loss=0.2921, pruned_loss=0.05916, over 7224.00 frames.], tot_loss[loss=0.1914, simple_loss=0.2764, pruned_loss=0.05324, over 1012591.00 frames.], batch size: 25, lr: 1.04e-03 +2022-05-27 17:12:16,009 INFO [train.py:823] (3/4) Epoch 16, batch 300, loss[loss=0.2036, simple_loss=0.3012, pruned_loss=0.05304, over 7242.00 frames.], tot_loss[loss=0.1901, simple_loss=0.2748, pruned_loss=0.05264, over 1105483.88 frames.], batch size: 24, lr: 1.04e-03 +2022-05-27 17:12:55,442 INFO [train.py:823] (3/4) Epoch 16, batch 350, loss[loss=0.2032, simple_loss=0.2951, pruned_loss=0.05564, over 7324.00 frames.], tot_loss[loss=0.1914, simple_loss=0.2758, pruned_loss=0.0535, over 1173271.99 frames.], batch size: 23, lr: 1.04e-03 +2022-05-27 17:13:34,558 INFO [train.py:823] (3/4) Epoch 16, batch 400, loss[loss=0.2251, simple_loss=0.2895, pruned_loss=0.0804, over 7293.00 frames.], tot_loss[loss=0.1927, simple_loss=0.277, pruned_loss=0.05416, over 1228755.96 frames.], batch size: 19, lr: 1.04e-03 +2022-05-27 17:14:13,548 INFO [train.py:823] (3/4) Epoch 16, batch 450, loss[loss=0.1941, simple_loss=0.2946, pruned_loss=0.04685, over 7412.00 frames.], tot_loss[loss=0.1937, simple_loss=0.2782, pruned_loss=0.0546, over 1276700.76 frames.], batch size: 22, lr: 1.04e-03 +2022-05-27 17:14:53,398 INFO [train.py:823] (3/4) Epoch 16, batch 500, loss[loss=0.1823, simple_loss=0.2816, pruned_loss=0.04144, over 6987.00 frames.], tot_loss[loss=0.1923, simple_loss=0.277, pruned_loss=0.05378, over 1312410.02 frames.], batch size: 29, lr: 1.04e-03 +2022-05-27 17:15:32,547 INFO [train.py:823] (3/4) Epoch 16, batch 550, loss[loss=0.196, simple_loss=0.2955, pruned_loss=0.04829, over 7378.00 frames.], tot_loss[loss=0.1929, simple_loss=0.2777, pruned_loss=0.054, over 1331042.67 frames.], batch size: 21, lr: 1.03e-03 +2022-05-27 17:16:11,605 INFO [train.py:823] (3/4) Epoch 16, batch 600, loss[loss=0.165, simple_loss=0.25, pruned_loss=0.04003, over 7100.00 frames.], tot_loss[loss=0.193, simple_loss=0.2777, pruned_loss=0.0542, over 1345393.36 frames.], batch size: 19, lr: 1.03e-03 +2022-05-27 17:16:52,279 INFO [train.py:823] (3/4) Epoch 16, batch 650, loss[loss=0.1796, simple_loss=0.2661, pruned_loss=0.04652, over 6792.00 frames.], tot_loss[loss=0.1956, simple_loss=0.2798, pruned_loss=0.05573, over 1361215.06 frames.], batch size: 15, lr: 
1.03e-03 +2022-05-27 17:17:31,535 INFO [train.py:823] (3/4) Epoch 16, batch 700, loss[loss=0.2083, simple_loss=0.2908, pruned_loss=0.0629, over 7292.00 frames.], tot_loss[loss=0.1947, simple_loss=0.2788, pruned_loss=0.0553, over 1371363.35 frames.], batch size: 19, lr: 1.03e-03 +2022-05-27 17:18:11,835 INFO [train.py:823] (3/4) Epoch 16, batch 750, loss[loss=0.1806, simple_loss=0.2669, pruned_loss=0.04716, over 7175.00 frames.], tot_loss[loss=0.1958, simple_loss=0.2795, pruned_loss=0.05607, over 1384167.03 frames.], batch size: 18, lr: 1.03e-03 +2022-05-27 17:18:51,434 INFO [train.py:823] (3/4) Epoch 16, batch 800, loss[loss=0.2072, simple_loss=0.2875, pruned_loss=0.06339, over 7384.00 frames.], tot_loss[loss=0.1946, simple_loss=0.279, pruned_loss=0.05503, over 1394493.79 frames.], batch size: 20, lr: 1.03e-03 +2022-05-27 17:19:30,578 INFO [train.py:823] (3/4) Epoch 16, batch 850, loss[loss=0.1887, simple_loss=0.2745, pruned_loss=0.05143, over 7187.00 frames.], tot_loss[loss=0.1941, simple_loss=0.2789, pruned_loss=0.0547, over 1401167.71 frames.], batch size: 21, lr: 1.03e-03 +2022-05-27 17:20:11,167 INFO [train.py:823] (3/4) Epoch 16, batch 900, loss[loss=0.1776, simple_loss=0.2676, pruned_loss=0.04377, over 7027.00 frames.], tot_loss[loss=0.1929, simple_loss=0.2779, pruned_loss=0.05391, over 1400611.40 frames.], batch size: 17, lr: 1.02e-03 +2022-05-27 17:21:02,502 INFO [train.py:823] (3/4) Epoch 17, batch 0, loss[loss=0.2133, simple_loss=0.3041, pruned_loss=0.06128, over 7186.00 frames.], tot_loss[loss=0.2133, simple_loss=0.3041, pruned_loss=0.06128, over 7186.00 frames.], batch size: 21, lr: 9.94e-04 +2022-05-27 17:21:42,037 INFO [train.py:823] (3/4) Epoch 17, batch 50, loss[loss=0.1932, simple_loss=0.2868, pruned_loss=0.04976, over 7020.00 frames.], tot_loss[loss=0.1984, simple_loss=0.283, pruned_loss=0.05689, over 315332.28 frames.], batch size: 26, lr: 9.92e-04 +2022-05-27 17:22:21,131 INFO [train.py:823] (3/4) Epoch 17, batch 100, loss[loss=0.1973, simple_loss=0.295, pruned_loss=0.04979, over 6963.00 frames.], tot_loss[loss=0.1956, simple_loss=0.2811, pruned_loss=0.05506, over 560659.06 frames.], batch size: 26, lr: 9.91e-04 +2022-05-27 17:23:00,142 INFO [train.py:823] (3/4) Epoch 17, batch 150, loss[loss=0.186, simple_loss=0.2597, pruned_loss=0.0562, over 7194.00 frames.], tot_loss[loss=0.1944, simple_loss=0.2801, pruned_loss=0.05431, over 748192.67 frames.], batch size: 18, lr: 9.89e-04 +2022-05-27 17:23:38,703 INFO [train.py:823] (3/4) Epoch 17, batch 200, loss[loss=0.2193, simple_loss=0.2985, pruned_loss=0.07005, over 6816.00 frames.], tot_loss[loss=0.1952, simple_loss=0.2808, pruned_loss=0.05477, over 897810.46 frames.], batch size: 29, lr: 9.88e-04 +2022-05-27 17:24:18,001 INFO [train.py:823] (3/4) Epoch 17, batch 250, loss[loss=0.1922, simple_loss=0.2801, pruned_loss=0.05214, over 7339.00 frames.], tot_loss[loss=0.1945, simple_loss=0.2803, pruned_loss=0.05439, over 1018330.56 frames.], batch size: 23, lr: 9.86e-04 +2022-05-27 17:24:57,549 INFO [train.py:823] (3/4) Epoch 17, batch 300, loss[loss=0.167, simple_loss=0.2533, pruned_loss=0.04038, over 7299.00 frames.], tot_loss[loss=0.193, simple_loss=0.2793, pruned_loss=0.05334, over 1104169.23 frames.], batch size: 18, lr: 9.85e-04 +2022-05-27 17:25:36,811 INFO [train.py:823] (3/4) Epoch 17, batch 350, loss[loss=0.1807, simple_loss=0.267, pruned_loss=0.0472, over 7389.00 frames.], tot_loss[loss=0.1918, simple_loss=0.2777, pruned_loss=0.05299, over 1170536.05 frames.], batch size: 19, lr: 9.84e-04 +2022-05-27 17:26:17,372 INFO 
[train.py:823] (3/4) Epoch 17, batch 400, loss[loss=0.2017, simple_loss=0.2774, pruned_loss=0.06304, over 7097.00 frames.], tot_loss[loss=0.1908, simple_loss=0.276, pruned_loss=0.05278, over 1226844.42 frames.], batch size: 19, lr: 9.82e-04 +2022-05-27 17:26:56,076 INFO [train.py:823] (3/4) Epoch 17, batch 450, loss[loss=0.1972, simple_loss=0.2875, pruned_loss=0.05349, over 4749.00 frames.], tot_loss[loss=0.1901, simple_loss=0.2754, pruned_loss=0.05245, over 1261436.47 frames.], batch size: 47, lr: 9.81e-04 +2022-05-27 17:27:35,204 INFO [train.py:823] (3/4) Epoch 17, batch 500, loss[loss=0.1663, simple_loss=0.2439, pruned_loss=0.04436, over 6982.00 frames.], tot_loss[loss=0.191, simple_loss=0.276, pruned_loss=0.05304, over 1297110.86 frames.], batch size: 16, lr: 9.79e-04 +2022-05-27 17:28:14,798 INFO [train.py:823] (3/4) Epoch 17, batch 550, loss[loss=0.2093, simple_loss=0.2986, pruned_loss=0.05997, over 7105.00 frames.], tot_loss[loss=0.1916, simple_loss=0.2764, pruned_loss=0.05344, over 1326830.05 frames.], batch size: 20, lr: 9.78e-04 +2022-05-27 17:28:53,872 INFO [train.py:823] (3/4) Epoch 17, batch 600, loss[loss=0.1856, simple_loss=0.2751, pruned_loss=0.04803, over 7310.00 frames.], tot_loss[loss=0.1912, simple_loss=0.2761, pruned_loss=0.05315, over 1349116.14 frames.], batch size: 22, lr: 9.76e-04 +2022-05-27 17:29:33,276 INFO [train.py:823] (3/4) Epoch 17, batch 650, loss[loss=0.1851, simple_loss=0.2577, pruned_loss=0.05623, over 7017.00 frames.], tot_loss[loss=0.1907, simple_loss=0.2756, pruned_loss=0.05285, over 1362199.18 frames.], batch size: 16, lr: 9.75e-04 +2022-05-27 17:30:12,362 INFO [train.py:823] (3/4) Epoch 17, batch 700, loss[loss=0.1708, simple_loss=0.25, pruned_loss=0.04579, over 7201.00 frames.], tot_loss[loss=0.19, simple_loss=0.2753, pruned_loss=0.05232, over 1373430.29 frames.], batch size: 16, lr: 9.74e-04 +2022-05-27 17:30:51,344 INFO [train.py:823] (3/4) Epoch 17, batch 750, loss[loss=0.1685, simple_loss=0.2481, pruned_loss=0.04447, over 7159.00 frames.], tot_loss[loss=0.1901, simple_loss=0.2752, pruned_loss=0.05245, over 1385429.69 frames.], batch size: 17, lr: 9.72e-04 +2022-05-27 17:31:30,660 INFO [train.py:823] (3/4) Epoch 17, batch 800, loss[loss=0.1669, simple_loss=0.2494, pruned_loss=0.04221, over 7034.00 frames.], tot_loss[loss=0.1897, simple_loss=0.2753, pruned_loss=0.05208, over 1389268.34 frames.], batch size: 16, lr: 9.71e-04 +2022-05-27 17:32:13,821 INFO [train.py:823] (3/4) Epoch 17, batch 850, loss[loss=0.2052, simple_loss=0.2941, pruned_loss=0.05819, over 7422.00 frames.], tot_loss[loss=0.1894, simple_loss=0.2748, pruned_loss=0.05199, over 1395556.45 frames.], batch size: 22, lr: 9.69e-04 +2022-05-27 17:32:52,885 INFO [train.py:823] (3/4) Epoch 17, batch 900, loss[loss=0.1725, simple_loss=0.2536, pruned_loss=0.04567, over 7290.00 frames.], tot_loss[loss=0.1899, simple_loss=0.2751, pruned_loss=0.05236, over 1401122.19 frames.], batch size: 17, lr: 9.68e-04 +2022-05-27 17:33:32,062 INFO [train.py:823] (3/4) Epoch 17, batch 950, loss[loss=0.2059, simple_loss=0.2792, pruned_loss=0.06635, over 4684.00 frames.], tot_loss[loss=0.1904, simple_loss=0.2753, pruned_loss=0.05278, over 1397027.55 frames.], batch size: 46, lr: 9.67e-04 +2022-05-27 17:33:44,918 INFO [train.py:823] (3/4) Epoch 18, batch 0, loss[loss=0.1905, simple_loss=0.2803, pruned_loss=0.05036, over 7375.00 frames.], tot_loss[loss=0.1905, simple_loss=0.2803, pruned_loss=0.05036, over 7375.00 frames.], batch size: 21, lr: 9.41e-04 +2022-05-27 17:34:24,228 INFO [train.py:823] (3/4) Epoch 18, 
batch 50, loss[loss=0.1786, simple_loss=0.27, pruned_loss=0.04356, over 7323.00 frames.], tot_loss[loss=0.1877, simple_loss=0.2731, pruned_loss=0.05114, over 322125.82 frames.], batch size: 23, lr: 9.40e-04 +2022-05-27 17:35:03,347 INFO [train.py:823] (3/4) Epoch 18, batch 100, loss[loss=0.1743, simple_loss=0.2721, pruned_loss=0.03822, over 7285.00 frames.], tot_loss[loss=0.1884, simple_loss=0.2744, pruned_loss=0.05115, over 563710.17 frames.], batch size: 20, lr: 9.39e-04 +2022-05-27 17:35:42,636 INFO [train.py:823] (3/4) Epoch 18, batch 150, loss[loss=0.1822, simple_loss=0.2619, pruned_loss=0.05129, over 7202.00 frames.], tot_loss[loss=0.1882, simple_loss=0.2739, pruned_loss=0.0512, over 756530.52 frames.], batch size: 20, lr: 9.37e-04 +2022-05-27 17:36:21,798 INFO [train.py:823] (3/4) Epoch 18, batch 200, loss[loss=0.2, simple_loss=0.2843, pruned_loss=0.05788, over 7294.00 frames.], tot_loss[loss=0.1884, simple_loss=0.2742, pruned_loss=0.05127, over 907243.01 frames.], batch size: 21, lr: 9.36e-04 +2022-05-27 17:37:01,180 INFO [train.py:823] (3/4) Epoch 18, batch 250, loss[loss=0.1881, simple_loss=0.2696, pruned_loss=0.05333, over 7303.00 frames.], tot_loss[loss=0.1898, simple_loss=0.2756, pruned_loss=0.05198, over 1015489.32 frames.], batch size: 22, lr: 9.35e-04 +2022-05-27 17:37:40,149 INFO [train.py:823] (3/4) Epoch 18, batch 300, loss[loss=0.1932, simple_loss=0.2581, pruned_loss=0.06413, over 7425.00 frames.], tot_loss[loss=0.1887, simple_loss=0.274, pruned_loss=0.0517, over 1106366.84 frames.], batch size: 18, lr: 9.33e-04 +2022-05-27 17:38:19,187 INFO [train.py:823] (3/4) Epoch 18, batch 350, loss[loss=0.1999, simple_loss=0.2899, pruned_loss=0.05499, over 7283.00 frames.], tot_loss[loss=0.1883, simple_loss=0.2738, pruned_loss=0.05146, over 1175489.25 frames.], batch size: 20, lr: 9.32e-04 +2022-05-27 17:38:58,311 INFO [train.py:823] (3/4) Epoch 18, batch 400, loss[loss=0.1824, simple_loss=0.2646, pruned_loss=0.05016, over 7383.00 frames.], tot_loss[loss=0.1891, simple_loss=0.2746, pruned_loss=0.05178, over 1227721.05 frames.], batch size: 19, lr: 9.31e-04 +2022-05-27 17:39:39,243 INFO [train.py:823] (3/4) Epoch 18, batch 450, loss[loss=0.2005, simple_loss=0.2903, pruned_loss=0.05529, over 7143.00 frames.], tot_loss[loss=0.1894, simple_loss=0.2751, pruned_loss=0.05183, over 1269727.28 frames.], batch size: 23, lr: 9.29e-04 +2022-05-27 17:40:18,353 INFO [train.py:823] (3/4) Epoch 18, batch 500, loss[loss=0.1862, simple_loss=0.2792, pruned_loss=0.04657, over 7417.00 frames.], tot_loss[loss=0.1881, simple_loss=0.2739, pruned_loss=0.05117, over 1307434.63 frames.], batch size: 22, lr: 9.28e-04 +2022-05-27 17:40:59,007 INFO [train.py:823] (3/4) Epoch 18, batch 550, loss[loss=0.1828, simple_loss=0.2731, pruned_loss=0.04625, over 7317.00 frames.], tot_loss[loss=0.1866, simple_loss=0.2723, pruned_loss=0.05042, over 1335838.73 frames.], batch size: 23, lr: 9.27e-04 +2022-05-27 17:41:37,933 INFO [train.py:823] (3/4) Epoch 18, batch 600, loss[loss=0.1781, simple_loss=0.2655, pruned_loss=0.04539, over 7287.00 frames.], tot_loss[loss=0.1874, simple_loss=0.2728, pruned_loss=0.05102, over 1357854.03 frames.], batch size: 19, lr: 9.26e-04 +2022-05-27 17:42:17,276 INFO [train.py:823] (3/4) Epoch 18, batch 650, loss[loss=0.176, simple_loss=0.2557, pruned_loss=0.04817, over 7090.00 frames.], tot_loss[loss=0.1862, simple_loss=0.2718, pruned_loss=0.0503, over 1371368.07 frames.], batch size: 19, lr: 9.24e-04 +2022-05-27 17:42:57,835 INFO [train.py:823] (3/4) Epoch 18, batch 700, loss[loss=0.1546, 
simple_loss=0.2462, pruned_loss=0.0315, over 7189.00 frames.], tot_loss[loss=0.1865, simple_loss=0.272, pruned_loss=0.05053, over 1376760.33 frames.], batch size: 19, lr: 9.23e-04 +2022-05-27 17:43:37,171 INFO [train.py:823] (3/4) Epoch 18, batch 750, loss[loss=0.1767, simple_loss=0.2623, pruned_loss=0.04553, over 7091.00 frames.], tot_loss[loss=0.1875, simple_loss=0.2732, pruned_loss=0.05086, over 1388482.30 frames.], batch size: 18, lr: 9.22e-04 +2022-05-27 17:44:16,159 INFO [train.py:823] (3/4) Epoch 18, batch 800, loss[loss=0.1972, simple_loss=0.2837, pruned_loss=0.05535, over 7190.00 frames.], tot_loss[loss=0.1871, simple_loss=0.2728, pruned_loss=0.05073, over 1391853.96 frames.], batch size: 20, lr: 9.21e-04 +2022-05-27 17:44:55,795 INFO [train.py:823] (3/4) Epoch 18, batch 850, loss[loss=0.1635, simple_loss=0.2673, pruned_loss=0.02987, over 7175.00 frames.], tot_loss[loss=0.1863, simple_loss=0.272, pruned_loss=0.05024, over 1394442.32 frames.], batch size: 21, lr: 9.19e-04 +2022-05-27 17:45:34,723 INFO [train.py:823] (3/4) Epoch 18, batch 900, loss[loss=0.1429, simple_loss=0.2296, pruned_loss=0.02812, over 7163.00 frames.], tot_loss[loss=0.1866, simple_loss=0.2727, pruned_loss=0.05024, over 1401359.45 frames.], batch size: 17, lr: 9.18e-04 +2022-05-27 17:46:13,719 INFO [train.py:823] (3/4) Epoch 18, batch 950, loss[loss=0.2023, simple_loss=0.2854, pruned_loss=0.0596, over 4883.00 frames.], tot_loss[loss=0.1875, simple_loss=0.2736, pruned_loss=0.05072, over 1373610.98 frames.], batch size: 47, lr: 9.17e-04 +2022-05-27 17:46:27,043 INFO [train.py:823] (3/4) Epoch 19, batch 0, loss[loss=0.2024, simple_loss=0.2822, pruned_loss=0.06135, over 7005.00 frames.], tot_loss[loss=0.2024, simple_loss=0.2822, pruned_loss=0.06135, over 7005.00 frames.], batch size: 26, lr: 8.94e-04 +2022-05-27 17:47:05,754 INFO [train.py:823] (3/4) Epoch 19, batch 50, loss[loss=0.1902, simple_loss=0.28, pruned_loss=0.05014, over 7195.00 frames.], tot_loss[loss=0.1853, simple_loss=0.27, pruned_loss=0.05026, over 325362.00 frames.], batch size: 19, lr: 8.92e-04 +2022-05-27 17:47:45,000 INFO [train.py:823] (3/4) Epoch 19, batch 100, loss[loss=0.1948, simple_loss=0.2827, pruned_loss=0.05347, over 6625.00 frames.], tot_loss[loss=0.1852, simple_loss=0.2705, pruned_loss=0.04994, over 565889.03 frames.], batch size: 34, lr: 8.91e-04 +2022-05-27 17:48:24,287 INFO [train.py:823] (3/4) Epoch 19, batch 150, loss[loss=0.1908, simple_loss=0.2726, pruned_loss=0.05454, over 7098.00 frames.], tot_loss[loss=0.1825, simple_loss=0.2686, pruned_loss=0.04817, over 759137.70 frames.], batch size: 18, lr: 8.90e-04 +2022-05-27 17:49:03,212 INFO [train.py:823] (3/4) Epoch 19, batch 200, loss[loss=0.2055, simple_loss=0.2853, pruned_loss=0.0628, over 7159.00 frames.], tot_loss[loss=0.1825, simple_loss=0.2687, pruned_loss=0.04816, over 901824.53 frames.], batch size: 22, lr: 8.89e-04 +2022-05-27 17:49:41,954 INFO [train.py:823] (3/4) Epoch 19, batch 250, loss[loss=0.1739, simple_loss=0.2491, pruned_loss=0.04935, over 7101.00 frames.], tot_loss[loss=0.1832, simple_loss=0.2699, pruned_loss=0.04827, over 1018347.95 frames.], batch size: 19, lr: 8.88e-04 +2022-05-27 17:50:22,400 INFO [train.py:823] (3/4) Epoch 19, batch 300, loss[loss=0.1695, simple_loss=0.2551, pruned_loss=0.04192, over 7014.00 frames.], tot_loss[loss=0.1853, simple_loss=0.2715, pruned_loss=0.04958, over 1110478.46 frames.], batch size: 16, lr: 8.87e-04 +2022-05-27 17:51:01,426 INFO [train.py:823] (3/4) Epoch 19, batch 350, loss[loss=0.2525, simple_loss=0.3293, 
pruned_loss=0.08787, over 7304.00 frames.], tot_loss[loss=0.186, simple_loss=0.2724, pruned_loss=0.04982, over 1178380.77 frames.], batch size: 18, lr: 8.85e-04 +2022-05-27 17:51:40,872 INFO [train.py:823] (3/4) Epoch 19, batch 400, loss[loss=0.1738, simple_loss=0.2525, pruned_loss=0.04758, over 7016.00 frames.], tot_loss[loss=0.1864, simple_loss=0.2725, pruned_loss=0.05014, over 1235591.99 frames.], batch size: 16, lr: 8.84e-04 +2022-05-27 17:52:20,042 INFO [train.py:823] (3/4) Epoch 19, batch 450, loss[loss=0.1952, simple_loss=0.2855, pruned_loss=0.05245, over 7159.00 frames.], tot_loss[loss=0.1871, simple_loss=0.2736, pruned_loss=0.05029, over 1277956.00 frames.], batch size: 23, lr: 8.83e-04 +2022-05-27 17:52:59,900 INFO [train.py:823] (3/4) Epoch 19, batch 500, loss[loss=0.196, simple_loss=0.2917, pruned_loss=0.05013, over 6512.00 frames.], tot_loss[loss=0.1863, simple_loss=0.2731, pruned_loss=0.04978, over 1310285.63 frames.], batch size: 34, lr: 8.82e-04 +2022-05-27 17:53:39,243 INFO [train.py:823] (3/4) Epoch 19, batch 550, loss[loss=0.1918, simple_loss=0.2742, pruned_loss=0.05471, over 7009.00 frames.], tot_loss[loss=0.186, simple_loss=0.273, pruned_loss=0.04953, over 1331819.63 frames.], batch size: 17, lr: 8.81e-04 +2022-05-27 17:54:18,383 INFO [train.py:823] (3/4) Epoch 19, batch 600, loss[loss=0.1584, simple_loss=0.2488, pruned_loss=0.03403, over 7105.00 frames.], tot_loss[loss=0.1858, simple_loss=0.2731, pruned_loss=0.04926, over 1352052.08 frames.], batch size: 19, lr: 8.80e-04 +2022-05-27 17:54:57,267 INFO [train.py:823] (3/4) Epoch 19, batch 650, loss[loss=0.1547, simple_loss=0.2375, pruned_loss=0.03596, over 7023.00 frames.], tot_loss[loss=0.1847, simple_loss=0.2715, pruned_loss=0.04891, over 1365919.92 frames.], batch size: 17, lr: 8.78e-04 +2022-05-27 17:55:36,379 INFO [train.py:823] (3/4) Epoch 19, batch 700, loss[loss=0.1926, simple_loss=0.2783, pruned_loss=0.05347, over 7058.00 frames.], tot_loss[loss=0.1842, simple_loss=0.2712, pruned_loss=0.04858, over 1377195.61 frames.], batch size: 26, lr: 8.77e-04 +2022-05-27 17:56:14,930 INFO [train.py:823] (3/4) Epoch 19, batch 750, loss[loss=0.1956, simple_loss=0.291, pruned_loss=0.05008, over 7370.00 frames.], tot_loss[loss=0.1839, simple_loss=0.2709, pruned_loss=0.04842, over 1387348.92 frames.], batch size: 21, lr: 8.76e-04 +2022-05-27 17:56:54,545 INFO [train.py:823] (3/4) Epoch 19, batch 800, loss[loss=0.1981, simple_loss=0.2923, pruned_loss=0.05197, over 7301.00 frames.], tot_loss[loss=0.184, simple_loss=0.2711, pruned_loss=0.04847, over 1397011.11 frames.], batch size: 22, lr: 8.75e-04 +2022-05-27 17:57:33,483 INFO [train.py:823] (3/4) Epoch 19, batch 850, loss[loss=0.202, simple_loss=0.2902, pruned_loss=0.05689, over 7377.00 frames.], tot_loss[loss=0.1841, simple_loss=0.2711, pruned_loss=0.04857, over 1402778.46 frames.], batch size: 21, lr: 8.74e-04 +2022-05-27 17:58:12,608 INFO [train.py:823] (3/4) Epoch 19, batch 900, loss[loss=0.2156, simple_loss=0.3046, pruned_loss=0.06326, over 7065.00 frames.], tot_loss[loss=0.1848, simple_loss=0.2714, pruned_loss=0.0491, over 1394939.82 frames.], batch size: 26, lr: 8.73e-04 +2022-05-27 17:59:02,775 INFO [train.py:823] (3/4) Epoch 20, batch 0, loss[loss=0.1806, simple_loss=0.2748, pruned_loss=0.04323, over 6600.00 frames.], tot_loss[loss=0.1806, simple_loss=0.2748, pruned_loss=0.04323, over 6600.00 frames.], batch size: 34, lr: 8.51e-04 +2022-05-27 17:59:42,323 INFO [train.py:823] (3/4) Epoch 20, batch 50, loss[loss=0.1624, simple_loss=0.2433, pruned_loss=0.04079, over 
7311.00 frames.], tot_loss[loss=0.1789, simple_loss=0.2662, pruned_loss=0.04579, over 322564.14 frames.], batch size: 18, lr: 8.49e-04 +2022-05-27 18:00:21,178 INFO [train.py:823] (3/4) Epoch 20, batch 100, loss[loss=0.1992, simple_loss=0.2787, pruned_loss=0.05984, over 5271.00 frames.], tot_loss[loss=0.1792, simple_loss=0.2668, pruned_loss=0.04586, over 563269.67 frames.], batch size: 47, lr: 8.48e-04 +2022-05-27 18:01:00,403 INFO [train.py:823] (3/4) Epoch 20, batch 150, loss[loss=0.164, simple_loss=0.2444, pruned_loss=0.0418, over 7291.00 frames.], tot_loss[loss=0.1812, simple_loss=0.2684, pruned_loss=0.04699, over 752754.92 frames.], batch size: 17, lr: 8.47e-04 +2022-05-27 18:01:41,365 INFO [train.py:823] (3/4) Epoch 20, batch 200, loss[loss=0.1805, simple_loss=0.2606, pruned_loss=0.05019, over 7004.00 frames.], tot_loss[loss=0.182, simple_loss=0.2694, pruned_loss=0.04734, over 903503.65 frames.], batch size: 16, lr: 8.46e-04 +2022-05-27 18:02:20,210 INFO [train.py:823] (3/4) Epoch 20, batch 250, loss[loss=0.2106, simple_loss=0.2859, pruned_loss=0.06759, over 7315.00 frames.], tot_loss[loss=0.1832, simple_loss=0.2699, pruned_loss=0.04825, over 1017599.35 frames.], batch size: 18, lr: 8.45e-04 +2022-05-27 18:02:59,646 INFO [train.py:823] (3/4) Epoch 20, batch 300, loss[loss=0.1785, simple_loss=0.2803, pruned_loss=0.03833, over 7308.00 frames.], tot_loss[loss=0.1838, simple_loss=0.2709, pruned_loss=0.04839, over 1107894.14 frames.], batch size: 22, lr: 8.44e-04 +2022-05-27 18:03:38,763 INFO [train.py:823] (3/4) Epoch 20, batch 350, loss[loss=0.188, simple_loss=0.277, pruned_loss=0.04945, over 7192.00 frames.], tot_loss[loss=0.1844, simple_loss=0.2714, pruned_loss=0.04874, over 1177034.75 frames.], batch size: 20, lr: 8.43e-04 +2022-05-27 18:04:18,926 INFO [train.py:823] (3/4) Epoch 20, batch 400, loss[loss=0.1914, simple_loss=0.2858, pruned_loss=0.04851, over 7165.00 frames.], tot_loss[loss=0.1845, simple_loss=0.2713, pruned_loss=0.04884, over 1231968.92 frames.], batch size: 23, lr: 8.42e-04 +2022-05-27 18:04:57,906 INFO [train.py:823] (3/4) Epoch 20, batch 450, loss[loss=0.1632, simple_loss=0.2349, pruned_loss=0.04575, over 7164.00 frames.], tot_loss[loss=0.1844, simple_loss=0.2714, pruned_loss=0.04867, over 1269717.21 frames.], batch size: 17, lr: 8.41e-04 +2022-05-27 18:05:38,468 INFO [train.py:823] (3/4) Epoch 20, batch 500, loss[loss=0.1959, simple_loss=0.2819, pruned_loss=0.05493, over 7025.00 frames.], tot_loss[loss=0.1843, simple_loss=0.2715, pruned_loss=0.04852, over 1305551.25 frames.], batch size: 17, lr: 8.40e-04 +2022-05-27 18:06:18,349 INFO [train.py:823] (3/4) Epoch 20, batch 550, loss[loss=0.2072, simple_loss=0.2877, pruned_loss=0.06334, over 7142.00 frames.], tot_loss[loss=0.1829, simple_loss=0.2698, pruned_loss=0.04805, over 1333648.98 frames.], batch size: 23, lr: 8.39e-04 +2022-05-27 18:06:57,399 INFO [train.py:823] (3/4) Epoch 20, batch 600, loss[loss=0.1798, simple_loss=0.2614, pruned_loss=0.04908, over 7092.00 frames.], tot_loss[loss=0.183, simple_loss=0.2699, pruned_loss=0.04809, over 1349815.66 frames.], batch size: 18, lr: 8.38e-04 +2022-05-27 18:07:37,182 INFO [train.py:823] (3/4) Epoch 20, batch 650, loss[loss=0.1879, simple_loss=0.2723, pruned_loss=0.05176, over 6928.00 frames.], tot_loss[loss=0.1836, simple_loss=0.2701, pruned_loss=0.04856, over 1365287.06 frames.], batch size: 29, lr: 8.37e-04 +2022-05-27 18:08:16,125 INFO [train.py:823] (3/4) Epoch 20, batch 700, loss[loss=0.1935, simple_loss=0.2844, pruned_loss=0.05129, over 7097.00 frames.], 
tot_loss[loss=0.1839, simple_loss=0.2702, pruned_loss=0.04883, over 1379889.45 frames.], batch size: 18, lr: 8.36e-04 +2022-05-27 18:08:55,667 INFO [train.py:823] (3/4) Epoch 20, batch 750, loss[loss=0.1993, simple_loss=0.2865, pruned_loss=0.05606, over 7285.00 frames.], tot_loss[loss=0.1833, simple_loss=0.2691, pruned_loss=0.04876, over 1389447.25 frames.], batch size: 21, lr: 8.35e-04 +2022-05-27 18:09:34,542 INFO [train.py:823] (3/4) Epoch 20, batch 800, loss[loss=0.1636, simple_loss=0.2539, pruned_loss=0.03662, over 7014.00 frames.], tot_loss[loss=0.1828, simple_loss=0.2696, pruned_loss=0.04802, over 1397710.93 frames.], batch size: 17, lr: 8.34e-04 +2022-05-27 18:10:13,635 INFO [train.py:823] (3/4) Epoch 20, batch 850, loss[loss=0.2171, simple_loss=0.3069, pruned_loss=0.06362, over 7018.00 frames.], tot_loss[loss=0.1824, simple_loss=0.2691, pruned_loss=0.04791, over 1400763.91 frames.], batch size: 26, lr: 8.33e-04 +2022-05-27 18:10:52,741 INFO [train.py:823] (3/4) Epoch 20, batch 900, loss[loss=0.1356, simple_loss=0.2216, pruned_loss=0.0248, over 6796.00 frames.], tot_loss[loss=0.183, simple_loss=0.2696, pruned_loss=0.04824, over 1397727.15 frames.], batch size: 15, lr: 8.31e-04 +2022-05-27 18:11:42,532 INFO [train.py:823] (3/4) Epoch 21, batch 0, loss[loss=0.186, simple_loss=0.2629, pruned_loss=0.0545, over 7196.00 frames.], tot_loss[loss=0.186, simple_loss=0.2629, pruned_loss=0.0545, over 7196.00 frames.], batch size: 18, lr: 8.11e-04 +2022-05-27 18:12:21,428 INFO [train.py:823] (3/4) Epoch 21, batch 50, loss[loss=0.195, simple_loss=0.2888, pruned_loss=0.05057, over 7176.00 frames.], tot_loss[loss=0.1796, simple_loss=0.267, pruned_loss=0.04604, over 318777.74 frames.], batch size: 25, lr: 8.10e-04 +2022-05-27 18:13:00,549 INFO [train.py:823] (3/4) Epoch 21, batch 100, loss[loss=0.1926, simple_loss=0.2898, pruned_loss=0.04765, over 6488.00 frames.], tot_loss[loss=0.1792, simple_loss=0.2659, pruned_loss=0.04626, over 562583.16 frames.], batch size: 34, lr: 8.09e-04 +2022-05-27 18:13:40,063 INFO [train.py:823] (3/4) Epoch 21, batch 150, loss[loss=0.1833, simple_loss=0.2617, pruned_loss=0.05248, over 7281.00 frames.], tot_loss[loss=0.1796, simple_loss=0.2662, pruned_loss=0.04651, over 755674.57 frames.], batch size: 20, lr: 8.08e-04 +2022-05-27 18:14:19,977 INFO [train.py:823] (3/4) Epoch 21, batch 200, loss[loss=0.1693, simple_loss=0.247, pruned_loss=0.04581, over 7291.00 frames.], tot_loss[loss=0.1797, simple_loss=0.2668, pruned_loss=0.04632, over 903116.72 frames.], batch size: 18, lr: 8.07e-04 +2022-05-27 18:14:59,215 INFO [train.py:823] (3/4) Epoch 21, batch 250, loss[loss=0.1629, simple_loss=0.2454, pruned_loss=0.04018, over 7283.00 frames.], tot_loss[loss=0.1795, simple_loss=0.2665, pruned_loss=0.04624, over 1011286.32 frames.], batch size: 20, lr: 8.06e-04 +2022-05-27 18:15:37,916 INFO [train.py:823] (3/4) Epoch 21, batch 300, loss[loss=0.1677, simple_loss=0.2595, pruned_loss=0.03795, over 6622.00 frames.], tot_loss[loss=0.181, simple_loss=0.268, pruned_loss=0.04704, over 1099538.36 frames.], batch size: 34, lr: 8.05e-04 +2022-05-27 18:16:17,467 INFO [train.py:823] (3/4) Epoch 21, batch 350, loss[loss=0.1963, simple_loss=0.2972, pruned_loss=0.04773, over 7416.00 frames.], tot_loss[loss=0.1807, simple_loss=0.268, pruned_loss=0.04668, over 1170407.74 frames.], batch size: 22, lr: 8.04e-04 +2022-05-27 18:16:56,547 INFO [train.py:823] (3/4) Epoch 21, batch 400, loss[loss=0.164, simple_loss=0.2437, pruned_loss=0.04209, over 7297.00 frames.], tot_loss[loss=0.182, simple_loss=0.2693, 
pruned_loss=0.04732, over 1225729.28 frames.], batch size: 17, lr: 8.03e-04 +2022-05-27 18:17:36,078 INFO [train.py:823] (3/4) Epoch 21, batch 450, loss[loss=0.1807, simple_loss=0.2732, pruned_loss=0.04411, over 7189.00 frames.], tot_loss[loss=0.1815, simple_loss=0.2687, pruned_loss=0.04711, over 1270355.17 frames.], batch size: 21, lr: 8.02e-04 +2022-05-27 18:18:15,348 INFO [train.py:823] (3/4) Epoch 21, batch 500, loss[loss=0.1499, simple_loss=0.2315, pruned_loss=0.03413, over 7181.00 frames.], tot_loss[loss=0.1809, simple_loss=0.2686, pruned_loss=0.04663, over 1303730.54 frames.], batch size: 18, lr: 8.01e-04 +2022-05-27 18:18:54,692 INFO [train.py:823] (3/4) Epoch 21, batch 550, loss[loss=0.1819, simple_loss=0.2772, pruned_loss=0.04333, over 7372.00 frames.], tot_loss[loss=0.1795, simple_loss=0.2672, pruned_loss=0.04591, over 1335434.53 frames.], batch size: 21, lr: 8.00e-04 +2022-05-27 18:19:33,863 INFO [train.py:823] (3/4) Epoch 21, batch 600, loss[loss=0.1804, simple_loss=0.2735, pruned_loss=0.04364, over 6640.00 frames.], tot_loss[loss=0.18, simple_loss=0.2676, pruned_loss=0.04621, over 1352936.60 frames.], batch size: 34, lr: 8.00e-04 +2022-05-27 18:20:13,204 INFO [train.py:823] (3/4) Epoch 21, batch 650, loss[loss=0.1884, simple_loss=0.2832, pruned_loss=0.04685, over 7302.00 frames.], tot_loss[loss=0.1814, simple_loss=0.2685, pruned_loss=0.04717, over 1368950.96 frames.], batch size: 22, lr: 7.99e-04 +2022-05-27 18:20:52,558 INFO [train.py:823] (3/4) Epoch 21, batch 700, loss[loss=0.1892, simple_loss=0.2789, pruned_loss=0.04971, over 7188.00 frames.], tot_loss[loss=0.1824, simple_loss=0.2697, pruned_loss=0.04758, over 1379664.86 frames.], batch size: 20, lr: 7.98e-04 +2022-05-27 18:21:31,596 INFO [train.py:823] (3/4) Epoch 21, batch 750, loss[loss=0.2057, simple_loss=0.2829, pruned_loss=0.06429, over 7227.00 frames.], tot_loss[loss=0.1817, simple_loss=0.2684, pruned_loss=0.04754, over 1378049.41 frames.], batch size: 25, lr: 7.97e-04 +2022-05-27 18:22:10,735 INFO [train.py:823] (3/4) Epoch 21, batch 800, loss[loss=0.1872, simple_loss=0.2773, pruned_loss=0.04849, over 7338.00 frames.], tot_loss[loss=0.1817, simple_loss=0.2685, pruned_loss=0.04744, over 1384414.58 frames.], batch size: 23, lr: 7.96e-04 +2022-05-27 18:22:50,083 INFO [train.py:823] (3/4) Epoch 21, batch 850, loss[loss=0.1759, simple_loss=0.2632, pruned_loss=0.04429, over 7206.00 frames.], tot_loss[loss=0.1809, simple_loss=0.2678, pruned_loss=0.04702, over 1389769.94 frames.], batch size: 20, lr: 7.95e-04 +2022-05-27 18:23:29,129 INFO [train.py:823] (3/4) Epoch 21, batch 900, loss[loss=0.1855, simple_loss=0.2713, pruned_loss=0.04985, over 7367.00 frames.], tot_loss[loss=0.1806, simple_loss=0.2676, pruned_loss=0.04681, over 1388052.14 frames.], batch size: 20, lr: 7.94e-04 +2022-05-27 18:24:19,606 INFO [train.py:823] (3/4) Epoch 22, batch 0, loss[loss=0.1832, simple_loss=0.271, pruned_loss=0.04776, over 7367.00 frames.], tot_loss[loss=0.1832, simple_loss=0.271, pruned_loss=0.04776, over 7367.00 frames.], batch size: 21, lr: 7.75e-04 +2022-05-27 18:25:00,023 INFO [train.py:823] (3/4) Epoch 22, batch 50, loss[loss=0.1857, simple_loss=0.2715, pruned_loss=0.04993, over 7158.00 frames.], tot_loss[loss=0.1767, simple_loss=0.2626, pruned_loss=0.04536, over 322231.64 frames.], batch size: 22, lr: 7.74e-04 +2022-05-27 18:25:39,883 INFO [train.py:823] (3/4) Epoch 22, batch 100, loss[loss=0.2055, simple_loss=0.2926, pruned_loss=0.0592, over 7108.00 frames.], tot_loss[loss=0.1797, simple_loss=0.2661, pruned_loss=0.04668, over 
567522.49 frames.], batch size: 20, lr: 7.73e-04 +2022-05-27 18:26:18,921 INFO [train.py:823] (3/4) Epoch 22, batch 150, loss[loss=0.2055, simple_loss=0.2883, pruned_loss=0.06137, over 4709.00 frames.], tot_loss[loss=0.1799, simple_loss=0.2668, pruned_loss=0.04644, over 754747.52 frames.], batch size: 46, lr: 7.73e-04 +2022-05-27 18:26:59,594 INFO [train.py:823] (3/4) Epoch 22, batch 200, loss[loss=0.1793, simple_loss=0.2769, pruned_loss=0.04082, over 7110.00 frames.], tot_loss[loss=0.1787, simple_loss=0.2665, pruned_loss=0.04552, over 898540.36 frames.], batch size: 20, lr: 7.72e-04 +2022-05-27 18:27:38,552 INFO [train.py:823] (3/4) Epoch 22, batch 250, loss[loss=0.1698, simple_loss=0.2521, pruned_loss=0.04372, over 7111.00 frames.], tot_loss[loss=0.179, simple_loss=0.2668, pruned_loss=0.04558, over 1016344.23 frames.], batch size: 18, lr: 7.71e-04 +2022-05-27 18:28:18,103 INFO [train.py:823] (3/4) Epoch 22, batch 300, loss[loss=0.1481, simple_loss=0.2396, pruned_loss=0.02833, over 7185.00 frames.], tot_loss[loss=0.1797, simple_loss=0.2671, pruned_loss=0.04618, over 1102390.70 frames.], batch size: 18, lr: 7.70e-04 +2022-05-27 18:28:58,698 INFO [train.py:823] (3/4) Epoch 22, batch 350, loss[loss=0.1707, simple_loss=0.2551, pruned_loss=0.04314, over 6959.00 frames.], tot_loss[loss=0.1796, simple_loss=0.2666, pruned_loss=0.04633, over 1173769.66 frames.], batch size: 29, lr: 7.69e-04 +2022-05-27 18:29:37,877 INFO [train.py:823] (3/4) Epoch 22, batch 400, loss[loss=0.1947, simple_loss=0.2891, pruned_loss=0.05018, over 7191.00 frames.], tot_loss[loss=0.1784, simple_loss=0.2654, pruned_loss=0.04567, over 1231004.99 frames.], batch size: 21, lr: 7.68e-04 +2022-05-27 18:30:17,540 INFO [train.py:823] (3/4) Epoch 22, batch 450, loss[loss=0.2001, simple_loss=0.2651, pruned_loss=0.06756, over 6867.00 frames.], tot_loss[loss=0.1779, simple_loss=0.265, pruned_loss=0.0454, over 1277487.58 frames.], batch size: 15, lr: 7.67e-04 +2022-05-27 18:30:56,609 INFO [train.py:823] (3/4) Epoch 22, batch 500, loss[loss=0.1814, simple_loss=0.2769, pruned_loss=0.04294, over 6536.00 frames.], tot_loss[loss=0.1777, simple_loss=0.2646, pruned_loss=0.04542, over 1304063.79 frames.], batch size: 34, lr: 7.66e-04 +2022-05-27 18:31:36,028 INFO [train.py:823] (3/4) Epoch 22, batch 550, loss[loss=0.1835, simple_loss=0.2839, pruned_loss=0.04158, over 6875.00 frames.], tot_loss[loss=0.1766, simple_loss=0.2639, pruned_loss=0.04461, over 1330853.32 frames.], batch size: 29, lr: 7.65e-04 +2022-05-27 18:32:15,272 INFO [train.py:823] (3/4) Epoch 22, batch 600, loss[loss=0.1615, simple_loss=0.228, pruned_loss=0.04755, over 7023.00 frames.], tot_loss[loss=0.1773, simple_loss=0.2641, pruned_loss=0.04522, over 1350076.86 frames.], batch size: 17, lr: 7.65e-04 +2022-05-27 18:32:54,293 INFO [train.py:823] (3/4) Epoch 22, batch 650, loss[loss=0.1812, simple_loss=0.2803, pruned_loss=0.04102, over 7112.00 frames.], tot_loss[loss=0.1772, simple_loss=0.2641, pruned_loss=0.04519, over 1359773.81 frames.], batch size: 20, lr: 7.64e-04 +2022-05-27 18:33:33,454 INFO [train.py:823] (3/4) Epoch 22, batch 700, loss[loss=0.1784, simple_loss=0.2685, pruned_loss=0.04417, over 7093.00 frames.], tot_loss[loss=0.1777, simple_loss=0.2647, pruned_loss=0.04536, over 1372683.96 frames.], batch size: 19, lr: 7.63e-04 +2022-05-27 18:34:12,346 INFO [train.py:823] (3/4) Epoch 22, batch 750, loss[loss=0.1824, simple_loss=0.267, pruned_loss=0.04884, over 7006.00 frames.], tot_loss[loss=0.1773, simple_loss=0.2645, pruned_loss=0.045, over 1381709.99 frames.], batch 
size: 16, lr: 7.62e-04 +2022-05-27 18:34:51,849 INFO [train.py:823] (3/4) Epoch 22, batch 800, loss[loss=0.1608, simple_loss=0.2494, pruned_loss=0.03614, over 7381.00 frames.], tot_loss[loss=0.177, simple_loss=0.2643, pruned_loss=0.04484, over 1391358.33 frames.], batch size: 20, lr: 7.61e-04 +2022-05-27 18:35:30,977 INFO [train.py:823] (3/4) Epoch 22, batch 850, loss[loss=0.1864, simple_loss=0.2789, pruned_loss=0.04691, over 6433.00 frames.], tot_loss[loss=0.1773, simple_loss=0.265, pruned_loss=0.04482, over 1400670.62 frames.], batch size: 34, lr: 7.60e-04 +2022-05-27 18:36:10,349 INFO [train.py:823] (3/4) Epoch 22, batch 900, loss[loss=0.2052, simple_loss=0.295, pruned_loss=0.05771, over 7154.00 frames.], tot_loss[loss=0.1776, simple_loss=0.2653, pruned_loss=0.04497, over 1405432.18 frames.], batch size: 23, lr: 7.59e-04 +2022-05-27 18:37:01,226 INFO [train.py:823] (3/4) Epoch 23, batch 0, loss[loss=0.1401, simple_loss=0.2268, pruned_loss=0.02669, over 7233.00 frames.], tot_loss[loss=0.1401, simple_loss=0.2268, pruned_loss=0.02669, over 7233.00 frames.], batch size: 16, lr: 7.42e-04 +2022-05-27 18:37:41,658 INFO [train.py:823] (3/4) Epoch 23, batch 50, loss[loss=0.1675, simple_loss=0.2585, pruned_loss=0.0383, over 7387.00 frames.], tot_loss[loss=0.1763, simple_loss=0.2643, pruned_loss=0.04414, over 321206.00 frames.], batch size: 21, lr: 7.41e-04 +2022-05-27 18:38:20,857 INFO [train.py:823] (3/4) Epoch 23, batch 100, loss[loss=0.1664, simple_loss=0.2561, pruned_loss=0.03837, over 7368.00 frames.], tot_loss[loss=0.1759, simple_loss=0.2639, pruned_loss=0.04397, over 562013.06 frames.], batch size: 20, lr: 7.41e-04 +2022-05-27 18:39:00,168 INFO [train.py:823] (3/4) Epoch 23, batch 150, loss[loss=0.1745, simple_loss=0.2524, pruned_loss=0.04829, over 7324.00 frames.], tot_loss[loss=0.1768, simple_loss=0.2651, pruned_loss=0.04427, over 751975.59 frames.], batch size: 18, lr: 7.40e-04 +2022-05-27 18:39:39,888 INFO [train.py:823] (3/4) Epoch 23, batch 200, loss[loss=0.1885, simple_loss=0.2673, pruned_loss=0.0549, over 4991.00 frames.], tot_loss[loss=0.1763, simple_loss=0.2646, pruned_loss=0.04404, over 897660.47 frames.], batch size: 47, lr: 7.39e-04 +2022-05-27 18:40:19,154 INFO [train.py:823] (3/4) Epoch 23, batch 250, loss[loss=0.1845, simple_loss=0.2576, pruned_loss=0.05563, over 7102.00 frames.], tot_loss[loss=0.1767, simple_loss=0.2649, pruned_loss=0.04427, over 1017295.13 frames.], batch size: 18, lr: 7.38e-04 +2022-05-27 18:40:58,352 INFO [train.py:823] (3/4) Epoch 23, batch 300, loss[loss=0.1721, simple_loss=0.2631, pruned_loss=0.04057, over 7287.00 frames.], tot_loss[loss=0.1781, simple_loss=0.2662, pruned_loss=0.04498, over 1110589.70 frames.], batch size: 22, lr: 7.37e-04 +2022-05-27 18:41:37,516 INFO [train.py:823] (3/4) Epoch 23, batch 350, loss[loss=0.1728, simple_loss=0.262, pruned_loss=0.04178, over 7276.00 frames.], tot_loss[loss=0.1771, simple_loss=0.265, pruned_loss=0.04462, over 1181517.72 frames.], batch size: 20, lr: 7.36e-04 +2022-05-27 18:42:16,418 INFO [train.py:823] (3/4) Epoch 23, batch 400, loss[loss=0.1456, simple_loss=0.2272, pruned_loss=0.03194, over 7303.00 frames.], tot_loss[loss=0.1755, simple_loss=0.2631, pruned_loss=0.04395, over 1233521.08 frames.], batch size: 17, lr: 7.36e-04 +2022-05-27 18:42:55,402 INFO [train.py:823] (3/4) Epoch 23, batch 450, loss[loss=0.1928, simple_loss=0.2746, pruned_loss=0.05549, over 5333.00 frames.], tot_loss[loss=0.1757, simple_loss=0.2632, pruned_loss=0.04408, over 1272817.08 frames.], batch size: 46, lr: 7.35e-04 +2022-05-27 
18:43:34,471 INFO [train.py:823] (3/4) Epoch 23, batch 500, loss[loss=0.1921, simple_loss=0.2911, pruned_loss=0.04652, over 6413.00 frames.], tot_loss[loss=0.1767, simple_loss=0.2645, pruned_loss=0.04451, over 1302570.90 frames.], batch size: 34, lr: 7.34e-04 +2022-05-27 18:44:13,818 INFO [train.py:823] (3/4) Epoch 23, batch 550, loss[loss=0.1931, simple_loss=0.2899, pruned_loss=0.04815, over 7206.00 frames.], tot_loss[loss=0.1773, simple_loss=0.2656, pruned_loss=0.04446, over 1332882.41 frames.], batch size: 24, lr: 7.33e-04 +2022-05-27 18:44:52,854 INFO [train.py:823] (3/4) Epoch 23, batch 600, loss[loss=0.1859, simple_loss=0.2683, pruned_loss=0.05175, over 4475.00 frames.], tot_loss[loss=0.1777, simple_loss=0.2657, pruned_loss=0.04488, over 1348447.77 frames.], batch size: 47, lr: 7.32e-04 +2022-05-27 18:45:32,309 INFO [train.py:823] (3/4) Epoch 23, batch 650, loss[loss=0.1737, simple_loss=0.265, pruned_loss=0.04124, over 7106.00 frames.], tot_loss[loss=0.1762, simple_loss=0.2644, pruned_loss=0.04398, over 1362977.09 frames.], batch size: 19, lr: 7.32e-04 +2022-05-27 18:46:11,209 INFO [train.py:823] (3/4) Epoch 23, batch 700, loss[loss=0.1611, simple_loss=0.2396, pruned_loss=0.04135, over 6989.00 frames.], tot_loss[loss=0.176, simple_loss=0.2638, pruned_loss=0.04407, over 1369696.88 frames.], batch size: 16, lr: 7.31e-04 +2022-05-27 18:46:50,525 INFO [train.py:823] (3/4) Epoch 23, batch 750, loss[loss=0.1735, simple_loss=0.2605, pruned_loss=0.04325, over 4894.00 frames.], tot_loss[loss=0.1767, simple_loss=0.2646, pruned_loss=0.04439, over 1376159.89 frames.], batch size: 46, lr: 7.30e-04 +2022-05-27 18:47:30,863 INFO [train.py:823] (3/4) Epoch 23, batch 800, loss[loss=0.1747, simple_loss=0.2517, pruned_loss=0.0489, over 7198.00 frames.], tot_loss[loss=0.1757, simple_loss=0.2635, pruned_loss=0.04393, over 1388884.77 frames.], batch size: 18, lr: 7.29e-04 +2022-05-27 18:48:10,097 INFO [train.py:823] (3/4) Epoch 23, batch 850, loss[loss=0.2169, simple_loss=0.309, pruned_loss=0.0624, over 7153.00 frames.], tot_loss[loss=0.1764, simple_loss=0.2639, pruned_loss=0.04444, over 1396064.42 frames.], batch size: 23, lr: 7.28e-04 +2022-05-27 18:48:48,882 INFO [train.py:823] (3/4) Epoch 23, batch 900, loss[loss=0.1553, simple_loss=0.2342, pruned_loss=0.03824, over 7005.00 frames.], tot_loss[loss=0.1754, simple_loss=0.2627, pruned_loss=0.04405, over 1400473.15 frames.], batch size: 17, lr: 7.28e-04 +2022-05-27 18:49:41,177 INFO [train.py:823] (3/4) Epoch 24, batch 0, loss[loss=0.1603, simple_loss=0.2408, pruned_loss=0.03987, over 7301.00 frames.], tot_loss[loss=0.1603, simple_loss=0.2408, pruned_loss=0.03987, over 7301.00 frames.], batch size: 18, lr: 7.12e-04 +2022-05-27 18:50:19,964 INFO [train.py:823] (3/4) Epoch 24, batch 50, loss[loss=0.1486, simple_loss=0.2306, pruned_loss=0.0333, over 7162.00 frames.], tot_loss[loss=0.1736, simple_loss=0.2617, pruned_loss=0.04276, over 318684.03 frames.], batch size: 17, lr: 7.11e-04 +2022-05-27 18:51:00,451 INFO [train.py:823] (3/4) Epoch 24, batch 100, loss[loss=0.1821, simple_loss=0.2716, pruned_loss=0.04628, over 6502.00 frames.], tot_loss[loss=0.1763, simple_loss=0.264, pruned_loss=0.04429, over 559266.72 frames.], batch size: 34, lr: 7.10e-04 +2022-05-27 18:51:39,740 INFO [train.py:823] (3/4) Epoch 24, batch 150, loss[loss=0.1746, simple_loss=0.2653, pruned_loss=0.04197, over 6947.00 frames.], tot_loss[loss=0.1766, simple_loss=0.2648, pruned_loss=0.04416, over 749961.44 frames.], batch size: 29, lr: 7.10e-04 +2022-05-27 18:52:18,842 INFO [train.py:823] 
(3/4) Epoch 24, batch 200, loss[loss=0.195, simple_loss=0.2853, pruned_loss=0.0523, over 7284.00 frames.], tot_loss[loss=0.1753, simple_loss=0.2633, pruned_loss=0.04359, over 899671.12 frames.], batch size: 21, lr: 7.09e-04 +2022-05-27 18:52:58,098 INFO [train.py:823] (3/4) Epoch 24, batch 250, loss[loss=0.1413, simple_loss=0.2158, pruned_loss=0.03338, over 7283.00 frames.], tot_loss[loss=0.1743, simple_loss=0.2622, pruned_loss=0.04314, over 1015311.09 frames.], batch size: 17, lr: 7.08e-04 +2022-05-27 18:53:37,244 INFO [train.py:823] (3/4) Epoch 24, batch 300, loss[loss=0.1942, simple_loss=0.282, pruned_loss=0.0532, over 7326.00 frames.], tot_loss[loss=0.176, simple_loss=0.2634, pruned_loss=0.0443, over 1100125.32 frames.], batch size: 23, lr: 7.07e-04 +2022-05-27 18:54:15,991 INFO [train.py:823] (3/4) Epoch 24, batch 350, loss[loss=0.1177, simple_loss=0.1969, pruned_loss=0.01928, over 7303.00 frames.], tot_loss[loss=0.1762, simple_loss=0.263, pruned_loss=0.0447, over 1175170.99 frames.], batch size: 17, lr: 7.07e-04 +2022-05-27 18:54:55,416 INFO [train.py:823] (3/4) Epoch 24, batch 400, loss[loss=0.1814, simple_loss=0.2643, pruned_loss=0.04921, over 7335.00 frames.], tot_loss[loss=0.1758, simple_loss=0.2626, pruned_loss=0.04445, over 1227707.11 frames.], batch size: 23, lr: 7.06e-04 +2022-05-27 18:55:34,398 INFO [train.py:823] (3/4) Epoch 24, batch 450, loss[loss=0.1632, simple_loss=0.2389, pruned_loss=0.0437, over 7187.00 frames.], tot_loss[loss=0.1747, simple_loss=0.2617, pruned_loss=0.04382, over 1269240.81 frames.], batch size: 18, lr: 7.05e-04 +2022-05-27 18:56:13,822 INFO [train.py:823] (3/4) Epoch 24, batch 500, loss[loss=0.1624, simple_loss=0.2615, pruned_loss=0.0316, over 7294.00 frames.], tot_loss[loss=0.1742, simple_loss=0.2613, pruned_loss=0.04354, over 1304751.91 frames.], batch size: 21, lr: 7.04e-04 +2022-05-27 18:56:52,927 INFO [train.py:823] (3/4) Epoch 24, batch 550, loss[loss=0.1872, simple_loss=0.2851, pruned_loss=0.04463, over 6468.00 frames.], tot_loss[loss=0.1745, simple_loss=0.2621, pruned_loss=0.04346, over 1328594.44 frames.], batch size: 34, lr: 7.04e-04 +2022-05-27 18:57:32,029 INFO [train.py:823] (3/4) Epoch 24, batch 600, loss[loss=0.1997, simple_loss=0.2862, pruned_loss=0.05663, over 7148.00 frames.], tot_loss[loss=0.175, simple_loss=0.2627, pruned_loss=0.04368, over 1346230.34 frames.], batch size: 23, lr: 7.03e-04 +2022-05-27 18:58:10,850 INFO [train.py:823] (3/4) Epoch 24, batch 650, loss[loss=0.1717, simple_loss=0.2542, pruned_loss=0.04456, over 7097.00 frames.], tot_loss[loss=0.1752, simple_loss=0.2631, pruned_loss=0.0437, over 1359447.53 frames.], batch size: 19, lr: 7.02e-04 +2022-05-27 18:58:49,878 INFO [train.py:823] (3/4) Epoch 24, batch 700, loss[loss=0.1803, simple_loss=0.2797, pruned_loss=0.04044, over 7164.00 frames.], tot_loss[loss=0.1759, simple_loss=0.2643, pruned_loss=0.0438, over 1372411.12 frames.], batch size: 22, lr: 7.01e-04 +2022-05-27 18:59:29,125 INFO [train.py:823] (3/4) Epoch 24, batch 750, loss[loss=0.1446, simple_loss=0.239, pruned_loss=0.02508, over 7110.00 frames.], tot_loss[loss=0.1758, simple_loss=0.264, pruned_loss=0.04379, over 1385176.64 frames.], batch size: 20, lr: 7.01e-04 +2022-05-27 19:00:08,857 INFO [train.py:823] (3/4) Epoch 24, batch 800, loss[loss=0.1517, simple_loss=0.2353, pruned_loss=0.03401, over 7256.00 frames.], tot_loss[loss=0.1756, simple_loss=0.2637, pruned_loss=0.04373, over 1393410.46 frames.], batch size: 16, lr: 7.00e-04 +2022-05-27 19:00:47,542 INFO [train.py:823] (3/4) Epoch 24, batch 850, 
loss[loss=0.1786, simple_loss=0.2721, pruned_loss=0.04252, over 7093.00 frames.], tot_loss[loss=0.1749, simple_loss=0.263, pruned_loss=0.04342, over 1396853.47 frames.], batch size: 20, lr: 6.99e-04 +2022-05-27 19:01:28,182 INFO [train.py:823] (3/4) Epoch 24, batch 900, loss[loss=0.1747, simple_loss=0.2632, pruned_loss=0.04309, over 6495.00 frames.], tot_loss[loss=0.1752, simple_loss=0.2633, pruned_loss=0.04353, over 1400176.05 frames.], batch size: 34, lr: 6.98e-04 +2022-05-27 19:02:07,095 INFO [train.py:823] (3/4) Epoch 24, batch 950, loss[loss=0.1661, simple_loss=0.2543, pruned_loss=0.03891, over 7091.00 frames.], tot_loss[loss=0.1766, simple_loss=0.2646, pruned_loss=0.04433, over 1395377.36 frames.], batch size: 18, lr: 6.98e-04 +2022-05-27 19:02:19,649 INFO [train.py:823] (3/4) Epoch 25, batch 0, loss[loss=0.1893, simple_loss=0.2859, pruned_loss=0.04633, over 7290.00 frames.], tot_loss[loss=0.1893, simple_loss=0.2859, pruned_loss=0.04633, over 7290.00 frames.], batch size: 21, lr: 6.84e-04 +2022-05-27 19:02:58,881 INFO [train.py:823] (3/4) Epoch 25, batch 50, loss[loss=0.1388, simple_loss=0.2236, pruned_loss=0.02696, over 7311.00 frames.], tot_loss[loss=0.1724, simple_loss=0.2588, pruned_loss=0.04295, over 323952.04 frames.], batch size: 17, lr: 6.83e-04 +2022-05-27 19:03:37,893 INFO [train.py:823] (3/4) Epoch 25, batch 100, loss[loss=0.1526, simple_loss=0.2531, pruned_loss=0.02602, over 6863.00 frames.], tot_loss[loss=0.1727, simple_loss=0.2598, pruned_loss=0.04276, over 563756.79 frames.], batch size: 15, lr: 6.82e-04 +2022-05-27 19:04:17,025 INFO [train.py:823] (3/4) Epoch 25, batch 150, loss[loss=0.2142, simple_loss=0.3092, pruned_loss=0.05963, over 7315.00 frames.], tot_loss[loss=0.1718, simple_loss=0.2593, pruned_loss=0.04218, over 758516.71 frames.], batch size: 22, lr: 6.82e-04 +2022-05-27 19:04:56,406 INFO [train.py:823] (3/4) Epoch 25, batch 200, loss[loss=0.1886, simple_loss=0.2795, pruned_loss=0.04887, over 7288.00 frames.], tot_loss[loss=0.173, simple_loss=0.2605, pruned_loss=0.04271, over 910708.14 frames.], batch size: 21, lr: 6.81e-04 +2022-05-27 19:05:35,276 INFO [train.py:823] (3/4) Epoch 25, batch 250, loss[loss=0.1607, simple_loss=0.2369, pruned_loss=0.04222, over 7287.00 frames.], tot_loss[loss=0.1737, simple_loss=0.2609, pruned_loss=0.04322, over 1023237.71 frames.], batch size: 17, lr: 6.80e-04 +2022-05-27 19:06:14,247 INFO [train.py:823] (3/4) Epoch 25, batch 300, loss[loss=0.1795, simple_loss=0.2757, pruned_loss=0.04168, over 7287.00 frames.], tot_loss[loss=0.1732, simple_loss=0.261, pruned_loss=0.04276, over 1116635.04 frames.], batch size: 21, lr: 6.80e-04 +2022-05-27 19:06:53,480 INFO [train.py:823] (3/4) Epoch 25, batch 350, loss[loss=0.2126, simple_loss=0.2907, pruned_loss=0.06728, over 7145.00 frames.], tot_loss[loss=0.1731, simple_loss=0.261, pruned_loss=0.04255, over 1182724.80 frames.], batch size: 23, lr: 6.79e-04 +2022-05-27 19:07:32,626 INFO [train.py:823] (3/4) Epoch 25, batch 400, loss[loss=0.1723, simple_loss=0.2566, pruned_loss=0.04404, over 7239.00 frames.], tot_loss[loss=0.174, simple_loss=0.2624, pruned_loss=0.04278, over 1239017.38 frames.], batch size: 25, lr: 6.78e-04 +2022-05-27 19:08:11,861 INFO [train.py:823] (3/4) Epoch 25, batch 450, loss[loss=0.1587, simple_loss=0.2421, pruned_loss=0.03766, over 6815.00 frames.], tot_loss[loss=0.1746, simple_loss=0.2629, pruned_loss=0.04318, over 1269972.45 frames.], batch size: 15, lr: 6.77e-04 +2022-05-27 19:08:50,603 INFO [train.py:823] (3/4) Epoch 25, batch 500, loss[loss=0.1592, 
simple_loss=0.2365, pruned_loss=0.04094, over 7003.00 frames.], tot_loss[loss=0.1745, simple_loss=0.2629, pruned_loss=0.04299, over 1303703.64 frames.], batch size: 16, lr: 6.77e-04 +2022-05-27 19:09:29,781 INFO [train.py:823] (3/4) Epoch 25, batch 550, loss[loss=0.1662, simple_loss=0.2566, pruned_loss=0.03794, over 7185.00 frames.], tot_loss[loss=0.1731, simple_loss=0.2615, pruned_loss=0.04238, over 1330660.59 frames.], batch size: 21, lr: 6.76e-04 +2022-05-27 19:10:09,040 INFO [train.py:823] (3/4) Epoch 25, batch 600, loss[loss=0.2035, simple_loss=0.2962, pruned_loss=0.05544, over 7281.00 frames.], tot_loss[loss=0.172, simple_loss=0.2601, pruned_loss=0.04196, over 1343635.36 frames.], batch size: 21, lr: 6.75e-04 +2022-05-27 19:10:49,744 INFO [train.py:823] (3/4) Epoch 25, batch 650, loss[loss=0.1575, simple_loss=0.2549, pruned_loss=0.02999, over 7282.00 frames.], tot_loss[loss=0.172, simple_loss=0.2602, pruned_loss=0.04185, over 1358812.33 frames.], batch size: 20, lr: 6.75e-04 +2022-05-27 19:11:28,979 INFO [train.py:823] (3/4) Epoch 25, batch 700, loss[loss=0.1588, simple_loss=0.235, pruned_loss=0.04132, over 7157.00 frames.], tot_loss[loss=0.172, simple_loss=0.2603, pruned_loss=0.04182, over 1370112.66 frames.], batch size: 17, lr: 6.74e-04 +2022-05-27 19:12:08,303 INFO [train.py:823] (3/4) Epoch 25, batch 750, loss[loss=0.1767, simple_loss=0.2737, pruned_loss=0.03982, over 7376.00 frames.], tot_loss[loss=0.1712, simple_loss=0.2594, pruned_loss=0.04151, over 1378485.75 frames.], batch size: 20, lr: 6.73e-04 +2022-05-27 19:12:47,532 INFO [train.py:823] (3/4) Epoch 25, batch 800, loss[loss=0.16, simple_loss=0.2509, pruned_loss=0.03458, over 7186.00 frames.], tot_loss[loss=0.1708, simple_loss=0.2588, pruned_loss=0.04139, over 1390966.57 frames.], batch size: 21, lr: 6.73e-04 +2022-05-27 19:13:27,895 INFO [train.py:823] (3/4) Epoch 25, batch 850, loss[loss=0.1599, simple_loss=0.2411, pruned_loss=0.03936, over 7199.00 frames.], tot_loss[loss=0.1717, simple_loss=0.26, pruned_loss=0.0417, over 1396346.30 frames.], batch size: 18, lr: 6.72e-04 +2022-05-27 19:14:08,803 INFO [train.py:823] (3/4) Epoch 25, batch 900, loss[loss=0.2011, simple_loss=0.2913, pruned_loss=0.05546, over 6520.00 frames.], tot_loss[loss=0.1722, simple_loss=0.2606, pruned_loss=0.04193, over 1394619.07 frames.], batch size: 34, lr: 6.71e-04 +2022-05-27 19:14:59,590 INFO [train.py:823] (3/4) Epoch 26, batch 0, loss[loss=0.1624, simple_loss=0.2309, pruned_loss=0.04692, over 7322.00 frames.], tot_loss[loss=0.1624, simple_loss=0.2309, pruned_loss=0.04692, over 7322.00 frames.], batch size: 18, lr: 6.58e-04 +2022-05-27 19:15:38,666 INFO [train.py:823] (3/4) Epoch 26, batch 50, loss[loss=0.1742, simple_loss=0.2596, pruned_loss=0.04445, over 7369.00 frames.], tot_loss[loss=0.1704, simple_loss=0.2588, pruned_loss=0.04101, over 324754.65 frames.], batch size: 20, lr: 6.57e-04 +2022-05-27 19:16:17,999 INFO [train.py:823] (3/4) Epoch 26, batch 100, loss[loss=0.1517, simple_loss=0.2456, pruned_loss=0.02887, over 7212.00 frames.], tot_loss[loss=0.1735, simple_loss=0.2612, pruned_loss=0.04285, over 568123.11 frames.], batch size: 25, lr: 6.56e-04 +2022-05-27 19:16:57,097 INFO [train.py:823] (3/4) Epoch 26, batch 150, loss[loss=0.2038, simple_loss=0.2914, pruned_loss=0.05817, over 7164.00 frames.], tot_loss[loss=0.1729, simple_loss=0.26, pruned_loss=0.04287, over 754137.70 frames.], batch size: 25, lr: 6.56e-04 +2022-05-27 19:17:36,098 INFO [train.py:823] (3/4) Epoch 26, batch 200, loss[loss=0.1476, simple_loss=0.2431, 
pruned_loss=0.02608, over 7101.00 frames.], tot_loss[loss=0.1724, simple_loss=0.2599, pruned_loss=0.04245, over 900298.44 frames.], batch size: 18, lr: 6.55e-04 +2022-05-27 19:18:15,568 INFO [train.py:823] (3/4) Epoch 26, batch 250, loss[loss=0.1679, simple_loss=0.2585, pruned_loss=0.03866, over 7404.00 frames.], tot_loss[loss=0.1723, simple_loss=0.2597, pruned_loss=0.04247, over 1015815.07 frames.], batch size: 22, lr: 6.55e-04 +2022-05-27 19:18:59,348 INFO [train.py:823] (3/4) Epoch 26, batch 300, loss[loss=0.1727, simple_loss=0.2644, pruned_loss=0.04048, over 7103.00 frames.], tot_loss[loss=0.1721, simple_loss=0.2597, pruned_loss=0.04219, over 1106369.97 frames.], batch size: 20, lr: 6.54e-04 +2022-05-27 19:19:38,192 INFO [train.py:823] (3/4) Epoch 26, batch 350, loss[loss=0.1562, simple_loss=0.2481, pruned_loss=0.0322, over 6630.00 frames.], tot_loss[loss=0.1718, simple_loss=0.2598, pruned_loss=0.04192, over 1177762.13 frames.], batch size: 34, lr: 6.53e-04 +2022-05-27 19:20:17,219 INFO [train.py:823] (3/4) Epoch 26, batch 400, loss[loss=0.1653, simple_loss=0.2545, pruned_loss=0.03802, over 7133.00 frames.], tot_loss[loss=0.1719, simple_loss=0.2602, pruned_loss=0.04177, over 1234719.89 frames.], batch size: 23, lr: 6.53e-04 +2022-05-27 19:20:55,987 INFO [train.py:823] (3/4) Epoch 26, batch 450, loss[loss=0.1855, simple_loss=0.2677, pruned_loss=0.05165, over 7188.00 frames.], tot_loss[loss=0.1713, simple_loss=0.2598, pruned_loss=0.04139, over 1274797.37 frames.], batch size: 21, lr: 6.52e-04 +2022-05-27 19:21:34,778 INFO [train.py:823] (3/4) Epoch 26, batch 500, loss[loss=0.1733, simple_loss=0.269, pruned_loss=0.0388, over 7026.00 frames.], tot_loss[loss=0.1716, simple_loss=0.2599, pruned_loss=0.04162, over 1305367.79 frames.], batch size: 26, lr: 6.51e-04 +2022-05-27 19:22:13,821 INFO [train.py:823] (3/4) Epoch 26, batch 550, loss[loss=0.1672, simple_loss=0.2455, pruned_loss=0.04446, over 6995.00 frames.], tot_loss[loss=0.1719, simple_loss=0.2602, pruned_loss=0.04179, over 1328943.85 frames.], batch size: 16, lr: 6.51e-04 +2022-05-27 19:22:52,199 INFO [train.py:823] (3/4) Epoch 26, batch 600, loss[loss=0.1771, simple_loss=0.2735, pruned_loss=0.04038, over 7306.00 frames.], tot_loss[loss=0.1711, simple_loss=0.2596, pruned_loss=0.04132, over 1348789.29 frames.], batch size: 22, lr: 6.50e-04 +2022-05-27 19:23:31,000 INFO [train.py:823] (3/4) Epoch 26, batch 650, loss[loss=0.1806, simple_loss=0.2675, pruned_loss=0.04692, over 7347.00 frames.], tot_loss[loss=0.1716, simple_loss=0.2594, pruned_loss=0.04184, over 1358037.22 frames.], batch size: 23, lr: 6.49e-04 +2022-05-27 19:24:10,412 INFO [train.py:823] (3/4) Epoch 26, batch 700, loss[loss=0.1681, simple_loss=0.27, pruned_loss=0.03308, over 6982.00 frames.], tot_loss[loss=0.1718, simple_loss=0.2599, pruned_loss=0.04185, over 1371249.80 frames.], batch size: 26, lr: 6.49e-04 +2022-05-27 19:24:49,321 INFO [train.py:823] (3/4) Epoch 26, batch 750, loss[loss=0.1902, simple_loss=0.2737, pruned_loss=0.05333, over 7296.00 frames.], tot_loss[loss=0.1726, simple_loss=0.2607, pruned_loss=0.04221, over 1374157.96 frames.], batch size: 19, lr: 6.48e-04 +2022-05-27 19:25:29,487 INFO [train.py:823] (3/4) Epoch 26, batch 800, loss[loss=0.1415, simple_loss=0.2304, pruned_loss=0.02623, over 7243.00 frames.], tot_loss[loss=0.1727, simple_loss=0.2609, pruned_loss=0.04224, over 1383275.76 frames.], batch size: 16, lr: 6.47e-04 +2022-05-27 19:26:08,524 INFO [train.py:823] (3/4) Epoch 26, batch 850, loss[loss=0.1486, simple_loss=0.2289, pruned_loss=0.0342, over 
7231.00 frames.], tot_loss[loss=0.1739, simple_loss=0.2617, pruned_loss=0.04307, over 1395030.47 frames.], batch size: 16, lr: 6.47e-04 +2022-05-27 19:26:47,899 INFO [train.py:823] (3/4) Epoch 26, batch 900, loss[loss=0.1587, simple_loss=0.2443, pruned_loss=0.03658, over 7018.00 frames.], tot_loss[loss=0.1739, simple_loss=0.262, pruned_loss=0.04293, over 1396925.88 frames.], batch size: 17, lr: 6.46e-04 +2022-05-27 19:27:39,245 INFO [train.py:823] (3/4) Epoch 27, batch 0, loss[loss=0.1619, simple_loss=0.2487, pruned_loss=0.03758, over 7191.00 frames.], tot_loss[loss=0.1619, simple_loss=0.2487, pruned_loss=0.03758, over 7191.00 frames.], batch size: 18, lr: 6.34e-04 +2022-05-27 19:28:18,661 INFO [train.py:823] (3/4) Epoch 27, batch 50, loss[loss=0.1525, simple_loss=0.2461, pruned_loss=0.02941, over 7184.00 frames.], tot_loss[loss=0.1683, simple_loss=0.2561, pruned_loss=0.04027, over 321545.62 frames.], batch size: 18, lr: 6.33e-04 +2022-05-27 19:28:57,578 INFO [train.py:823] (3/4) Epoch 27, batch 100, loss[loss=0.1785, simple_loss=0.2688, pruned_loss=0.04405, over 7237.00 frames.], tot_loss[loss=0.1701, simple_loss=0.2588, pruned_loss=0.04068, over 563217.21 frames.], batch size: 25, lr: 6.32e-04 +2022-05-27 19:29:36,515 INFO [train.py:823] (3/4) Epoch 27, batch 150, loss[loss=0.1626, simple_loss=0.2402, pruned_loss=0.04247, over 7310.00 frames.], tot_loss[loss=0.1699, simple_loss=0.2586, pruned_loss=0.04056, over 752102.35 frames.], batch size: 18, lr: 6.32e-04 +2022-05-27 19:30:15,620 INFO [train.py:823] (3/4) Epoch 27, batch 200, loss[loss=0.1595, simple_loss=0.2528, pruned_loss=0.03306, over 7423.00 frames.], tot_loss[loss=0.1702, simple_loss=0.2588, pruned_loss=0.04077, over 898785.67 frames.], batch size: 22, lr: 6.31e-04 +2022-05-27 19:30:54,990 INFO [train.py:823] (3/4) Epoch 27, batch 250, loss[loss=0.1437, simple_loss=0.2321, pruned_loss=0.02762, over 7037.00 frames.], tot_loss[loss=0.1705, simple_loss=0.2594, pruned_loss=0.04078, over 1011571.49 frames.], batch size: 17, lr: 6.31e-04 +2022-05-27 19:31:34,042 INFO [train.py:823] (3/4) Epoch 27, batch 300, loss[loss=0.1676, simple_loss=0.2613, pruned_loss=0.03692, over 7359.00 frames.], tot_loss[loss=0.1707, simple_loss=0.2594, pruned_loss=0.04105, over 1105085.86 frames.], batch size: 21, lr: 6.30e-04 +2022-05-27 19:32:13,813 INFO [train.py:823] (3/4) Epoch 27, batch 350, loss[loss=0.1325, simple_loss=0.2282, pruned_loss=0.0184, over 7292.00 frames.], tot_loss[loss=0.17, simple_loss=0.2585, pruned_loss=0.04072, over 1177122.55 frames.], batch size: 19, lr: 6.29e-04 +2022-05-27 19:32:52,597 INFO [train.py:823] (3/4) Epoch 27, batch 400, loss[loss=0.1745, simple_loss=0.2666, pruned_loss=0.04117, over 7279.00 frames.], tot_loss[loss=0.1698, simple_loss=0.2585, pruned_loss=0.0405, over 1231913.97 frames.], batch size: 20, lr: 6.29e-04 +2022-05-27 19:33:33,786 INFO [train.py:823] (3/4) Epoch 27, batch 450, loss[loss=0.1727, simple_loss=0.2626, pruned_loss=0.04143, over 5257.00 frames.], tot_loss[loss=0.1695, simple_loss=0.2579, pruned_loss=0.04055, over 1276058.50 frames.], batch size: 46, lr: 6.28e-04 +2022-05-27 19:34:12,458 INFO [train.py:823] (3/4) Epoch 27, batch 500, loss[loss=0.186, simple_loss=0.2761, pruned_loss=0.04789, over 7169.00 frames.], tot_loss[loss=0.1697, simple_loss=0.2581, pruned_loss=0.04064, over 1300624.61 frames.], batch size: 23, lr: 6.28e-04 +2022-05-27 19:34:52,067 INFO [train.py:823] (3/4) Epoch 27, batch 550, loss[loss=0.1651, simple_loss=0.2565, pruned_loss=0.03687, over 7278.00 frames.], 
tot_loss[loss=0.17, simple_loss=0.2588, pruned_loss=0.04061, over 1328831.65 frames.], batch size: 20, lr: 6.27e-04 +2022-05-27 19:35:30,732 INFO [train.py:823] (3/4) Epoch 27, batch 600, loss[loss=0.1416, simple_loss=0.2218, pruned_loss=0.03073, over 7307.00 frames.], tot_loss[loss=0.1703, simple_loss=0.259, pruned_loss=0.04078, over 1354864.03 frames.], batch size: 18, lr: 6.26e-04 +2022-05-27 19:36:10,998 INFO [train.py:823] (3/4) Epoch 27, batch 650, loss[loss=0.1551, simple_loss=0.2457, pruned_loss=0.03224, over 7200.00 frames.], tot_loss[loss=0.1702, simple_loss=0.259, pruned_loss=0.04066, over 1373641.25 frames.], batch size: 19, lr: 6.26e-04 +2022-05-27 19:36:51,519 INFO [train.py:823] (3/4) Epoch 27, batch 700, loss[loss=0.1668, simple_loss=0.2683, pruned_loss=0.03264, over 7368.00 frames.], tot_loss[loss=0.1699, simple_loss=0.259, pruned_loss=0.0404, over 1384652.36 frames.], batch size: 21, lr: 6.25e-04 +2022-05-27 19:37:31,085 INFO [train.py:823] (3/4) Epoch 27, batch 750, loss[loss=0.1665, simple_loss=0.2474, pruned_loss=0.04284, over 7194.00 frames.], tot_loss[loss=0.1702, simple_loss=0.2592, pruned_loss=0.04062, over 1391853.05 frames.], batch size: 19, lr: 6.25e-04 +2022-05-27 19:38:09,954 INFO [train.py:823] (3/4) Epoch 27, batch 800, loss[loss=0.1848, simple_loss=0.2724, pruned_loss=0.04866, over 7151.00 frames.], tot_loss[loss=0.1715, simple_loss=0.26, pruned_loss=0.0415, over 1392743.17 frames.], batch size: 23, lr: 6.24e-04 +2022-05-27 19:38:49,299 INFO [train.py:823] (3/4) Epoch 27, batch 850, loss[loss=0.1779, simple_loss=0.2756, pruned_loss=0.04012, over 7110.00 frames.], tot_loss[loss=0.171, simple_loss=0.2592, pruned_loss=0.04139, over 1395979.18 frames.], batch size: 20, lr: 6.23e-04 +2022-05-27 19:39:28,633 INFO [train.py:823] (3/4) Epoch 27, batch 900, loss[loss=0.1409, simple_loss=0.2234, pruned_loss=0.02923, over 7273.00 frames.], tot_loss[loss=0.1704, simple_loss=0.2588, pruned_loss=0.04101, over 1398341.42 frames.], batch size: 17, lr: 6.23e-04 +2022-05-27 19:40:22,732 INFO [train.py:823] (3/4) Epoch 28, batch 0, loss[loss=0.1717, simple_loss=0.2533, pruned_loss=0.04499, over 7193.00 frames.], tot_loss[loss=0.1717, simple_loss=0.2533, pruned_loss=0.04499, over 7193.00 frames.], batch size: 20, lr: 6.11e-04 +2022-05-27 19:41:02,208 INFO [train.py:823] (3/4) Epoch 28, batch 50, loss[loss=0.1664, simple_loss=0.2664, pruned_loss=0.03321, over 7114.00 frames.], tot_loss[loss=0.1678, simple_loss=0.2559, pruned_loss=0.03988, over 316771.55 frames.], batch size: 20, lr: 6.11e-04 +2022-05-27 19:41:41,874 INFO [train.py:823] (3/4) Epoch 28, batch 100, loss[loss=0.1785, simple_loss=0.2707, pruned_loss=0.04313, over 7041.00 frames.], tot_loss[loss=0.1676, simple_loss=0.2562, pruned_loss=0.0395, over 562605.90 frames.], batch size: 26, lr: 6.10e-04 +2022-05-27 19:42:21,133 INFO [train.py:823] (3/4) Epoch 28, batch 150, loss[loss=0.1837, simple_loss=0.2587, pruned_loss=0.05433, over 4480.00 frames.], tot_loss[loss=0.169, simple_loss=0.2567, pruned_loss=0.04066, over 750747.88 frames.], batch size: 46, lr: 6.09e-04 +2022-05-27 19:43:00,542 INFO [train.py:823] (3/4) Epoch 28, batch 200, loss[loss=0.1868, simple_loss=0.2777, pruned_loss=0.04793, over 7197.00 frames.], tot_loss[loss=0.1691, simple_loss=0.2567, pruned_loss=0.04076, over 900950.91 frames.], batch size: 20, lr: 6.09e-04 +2022-05-27 19:43:39,811 INFO [train.py:823] (3/4) Epoch 28, batch 250, loss[loss=0.1466, simple_loss=0.2401, pruned_loss=0.02651, over 7332.00 frames.], tot_loss[loss=0.1674, 
simple_loss=0.2559, pruned_loss=0.0395, over 1016695.28 frames.], batch size: 23, lr: 6.08e-04 +2022-05-27 19:44:19,082 INFO [train.py:823] (3/4) Epoch 28, batch 300, loss[loss=0.1843, simple_loss=0.2796, pruned_loss=0.0445, over 6983.00 frames.], tot_loss[loss=0.1683, simple_loss=0.2565, pruned_loss=0.04008, over 1104761.99 frames.], batch size: 29, lr: 6.08e-04 +2022-05-27 19:44:58,540 INFO [train.py:823] (3/4) Epoch 28, batch 350, loss[loss=0.1931, simple_loss=0.2889, pruned_loss=0.04866, over 7355.00 frames.], tot_loss[loss=0.1689, simple_loss=0.2572, pruned_loss=0.0403, over 1174949.22 frames.], batch size: 23, lr: 6.07e-04 +2022-05-27 19:45:37,863 INFO [train.py:823] (3/4) Epoch 28, batch 400, loss[loss=0.1868, simple_loss=0.2842, pruned_loss=0.04473, over 7288.00 frames.], tot_loss[loss=0.1688, simple_loss=0.257, pruned_loss=0.04028, over 1229194.22 frames.], batch size: 21, lr: 6.07e-04 +2022-05-27 19:46:16,868 INFO [train.py:823] (3/4) Epoch 28, batch 450, loss[loss=0.1655, simple_loss=0.2554, pruned_loss=0.0378, over 6882.00 frames.], tot_loss[loss=0.1686, simple_loss=0.2569, pruned_loss=0.04012, over 1269471.76 frames.], batch size: 29, lr: 6.06e-04 +2022-05-27 19:46:56,129 INFO [train.py:823] (3/4) Epoch 28, batch 500, loss[loss=0.1908, simple_loss=0.2839, pruned_loss=0.04889, over 6925.00 frames.], tot_loss[loss=0.1677, simple_loss=0.2559, pruned_loss=0.0397, over 1306225.49 frames.], batch size: 29, lr: 6.06e-04 +2022-05-27 19:47:35,322 INFO [train.py:823] (3/4) Epoch 28, batch 550, loss[loss=0.1689, simple_loss=0.2574, pruned_loss=0.04025, over 7104.00 frames.], tot_loss[loss=0.1683, simple_loss=0.2567, pruned_loss=0.03999, over 1329720.45 frames.], batch size: 20, lr: 6.05e-04 +2022-05-27 19:48:14,521 INFO [train.py:823] (3/4) Epoch 28, batch 600, loss[loss=0.1484, simple_loss=0.2415, pruned_loss=0.02764, over 7195.00 frames.], tot_loss[loss=0.1685, simple_loss=0.2571, pruned_loss=0.03995, over 1348303.75 frames.], batch size: 19, lr: 6.04e-04 +2022-05-27 19:48:53,690 INFO [train.py:823] (3/4) Epoch 28, batch 650, loss[loss=0.1535, simple_loss=0.2427, pruned_loss=0.03218, over 7281.00 frames.], tot_loss[loss=0.1686, simple_loss=0.2573, pruned_loss=0.0399, over 1366866.40 frames.], batch size: 19, lr: 6.04e-04 +2022-05-27 19:49:34,190 INFO [train.py:823] (3/4) Epoch 28, batch 700, loss[loss=0.1473, simple_loss=0.2366, pruned_loss=0.029, over 7306.00 frames.], tot_loss[loss=0.169, simple_loss=0.2579, pruned_loss=0.04004, over 1376787.43 frames.], batch size: 18, lr: 6.03e-04 +2022-05-27 19:50:13,240 INFO [train.py:823] (3/4) Epoch 28, batch 750, loss[loss=0.2018, simple_loss=0.279, pruned_loss=0.0623, over 5113.00 frames.], tot_loss[loss=0.1686, simple_loss=0.2577, pruned_loss=0.03978, over 1383278.07 frames.], batch size: 47, lr: 6.03e-04 +2022-05-27 19:50:52,559 INFO [train.py:823] (3/4) Epoch 28, batch 800, loss[loss=0.1377, simple_loss=0.233, pruned_loss=0.02118, over 7010.00 frames.], tot_loss[loss=0.1681, simple_loss=0.2577, pruned_loss=0.0393, over 1395206.81 frames.], batch size: 16, lr: 6.02e-04 +2022-05-27 19:51:31,452 INFO [train.py:823] (3/4) Epoch 28, batch 850, loss[loss=0.1615, simple_loss=0.2608, pruned_loss=0.0311, over 7370.00 frames.], tot_loss[loss=0.1684, simple_loss=0.258, pruned_loss=0.03935, over 1398631.55 frames.], batch size: 21, lr: 6.02e-04 +2022-05-27 19:52:10,756 INFO [train.py:823] (3/4) Epoch 28, batch 900, loss[loss=0.1803, simple_loss=0.2748, pruned_loss=0.04294, over 7376.00 frames.], tot_loss[loss=0.1688, simple_loss=0.2584, 
pruned_loss=0.03958, over 1401842.58 frames.], batch size: 21, lr: 6.01e-04 +2022-05-27 19:53:03,349 INFO [train.py:823] (3/4) Epoch 29, batch 0, loss[loss=0.1898, simple_loss=0.2725, pruned_loss=0.05357, over 6984.00 frames.], tot_loss[loss=0.1898, simple_loss=0.2725, pruned_loss=0.05357, over 6984.00 frames.], batch size: 26, lr: 5.90e-04 +2022-05-27 19:53:42,783 INFO [train.py:823] (3/4) Epoch 29, batch 50, loss[loss=0.1659, simple_loss=0.2579, pruned_loss=0.03689, over 7283.00 frames.], tot_loss[loss=0.1697, simple_loss=0.2576, pruned_loss=0.04089, over 321218.79 frames.], batch size: 21, lr: 5.90e-04 +2022-05-27 19:54:22,236 INFO [train.py:823] (3/4) Epoch 29, batch 100, loss[loss=0.1763, simple_loss=0.2551, pruned_loss=0.04877, over 7216.00 frames.], tot_loss[loss=0.1704, simple_loss=0.2582, pruned_loss=0.04136, over 569850.86 frames.], batch size: 24, lr: 5.89e-04 +2022-05-27 19:55:01,978 INFO [train.py:823] (3/4) Epoch 29, batch 150, loss[loss=0.1886, simple_loss=0.2689, pruned_loss=0.05417, over 7293.00 frames.], tot_loss[loss=0.1686, simple_loss=0.2558, pruned_loss=0.04067, over 759500.72 frames.], batch size: 19, lr: 5.89e-04 +2022-05-27 19:55:40,947 INFO [train.py:823] (3/4) Epoch 29, batch 200, loss[loss=0.1798, simple_loss=0.2691, pruned_loss=0.04527, over 7340.00 frames.], tot_loss[loss=0.1693, simple_loss=0.2573, pruned_loss=0.04069, over 897931.45 frames.], batch size: 23, lr: 5.88e-04 +2022-05-27 19:56:21,516 INFO [train.py:823] (3/4) Epoch 29, batch 250, loss[loss=0.1822, simple_loss=0.2675, pruned_loss=0.04838, over 7391.00 frames.], tot_loss[loss=0.1668, simple_loss=0.2552, pruned_loss=0.03921, over 1015100.09 frames.], batch size: 19, lr: 5.88e-04 +2022-05-27 19:57:00,590 INFO [train.py:823] (3/4) Epoch 29, batch 300, loss[loss=0.1843, simple_loss=0.2595, pruned_loss=0.05454, over 7282.00 frames.], tot_loss[loss=0.1674, simple_loss=0.2554, pruned_loss=0.03969, over 1104604.26 frames.], batch size: 20, lr: 5.87e-04 +2022-05-27 19:57:40,101 INFO [train.py:823] (3/4) Epoch 29, batch 350, loss[loss=0.1646, simple_loss=0.2383, pruned_loss=0.04548, over 6764.00 frames.], tot_loss[loss=0.1669, simple_loss=0.2553, pruned_loss=0.03931, over 1173600.32 frames.], batch size: 15, lr: 5.87e-04 +2022-05-27 19:58:19,167 INFO [train.py:823] (3/4) Epoch 29, batch 400, loss[loss=0.1361, simple_loss=0.2216, pruned_loss=0.0253, over 7291.00 frames.], tot_loss[loss=0.1678, simple_loss=0.2566, pruned_loss=0.03943, over 1229901.91 frames.], batch size: 17, lr: 5.86e-04 +2022-05-27 19:58:59,813 INFO [train.py:823] (3/4) Epoch 29, batch 450, loss[loss=0.1566, simple_loss=0.2422, pruned_loss=0.0355, over 7094.00 frames.], tot_loss[loss=0.1679, simple_loss=0.2565, pruned_loss=0.03962, over 1270357.89 frames.], batch size: 18, lr: 5.85e-04 +2022-05-27 19:59:40,159 INFO [train.py:823] (3/4) Epoch 29, batch 500, loss[loss=0.1633, simple_loss=0.2527, pruned_loss=0.03698, over 7118.00 frames.], tot_loss[loss=0.1676, simple_loss=0.2564, pruned_loss=0.03935, over 1298908.98 frames.], batch size: 20, lr: 5.85e-04 +2022-05-27 20:00:19,262 INFO [train.py:823] (3/4) Epoch 29, batch 550, loss[loss=0.1781, simple_loss=0.2631, pruned_loss=0.04659, over 6465.00 frames.], tot_loss[loss=0.1672, simple_loss=0.2561, pruned_loss=0.03916, over 1327901.29 frames.], batch size: 34, lr: 5.84e-04 +2022-05-27 20:00:58,288 INFO [train.py:823] (3/4) Epoch 29, batch 600, loss[loss=0.169, simple_loss=0.255, pruned_loss=0.04145, over 6615.00 frames.], tot_loss[loss=0.1684, simple_loss=0.2574, pruned_loss=0.03972, over 
1348248.92 frames.], batch size: 34, lr: 5.84e-04 +2022-05-27 20:01:37,842 INFO [train.py:823] (3/4) Epoch 29, batch 650, loss[loss=0.1627, simple_loss=0.2527, pruned_loss=0.03641, over 7375.00 frames.], tot_loss[loss=0.1691, simple_loss=0.2581, pruned_loss=0.04006, over 1365598.95 frames.], batch size: 20, lr: 5.83e-04 +2022-05-27 20:02:16,323 INFO [train.py:823] (3/4) Epoch 29, batch 700, loss[loss=0.1592, simple_loss=0.2427, pruned_loss=0.03783, over 7204.00 frames.], tot_loss[loss=0.1686, simple_loss=0.2575, pruned_loss=0.03987, over 1372923.90 frames.], batch size: 19, lr: 5.83e-04 +2022-05-27 20:02:55,507 INFO [train.py:823] (3/4) Epoch 29, batch 750, loss[loss=0.1964, simple_loss=0.2776, pruned_loss=0.05764, over 4813.00 frames.], tot_loss[loss=0.1692, simple_loss=0.2582, pruned_loss=0.04009, over 1380966.66 frames.], batch size: 47, lr: 5.82e-04 +2022-05-27 20:03:34,181 INFO [train.py:823] (3/4) Epoch 29, batch 800, loss[loss=0.1492, simple_loss=0.2388, pruned_loss=0.0298, over 7191.00 frames.], tot_loss[loss=0.1682, simple_loss=0.2572, pruned_loss=0.03953, over 1388449.87 frames.], batch size: 18, lr: 5.82e-04 +2022-05-27 20:04:13,281 INFO [train.py:823] (3/4) Epoch 29, batch 850, loss[loss=0.1801, simple_loss=0.2775, pruned_loss=0.0413, over 7257.00 frames.], tot_loss[loss=0.1676, simple_loss=0.2573, pruned_loss=0.03895, over 1397819.45 frames.], batch size: 24, lr: 5.81e-04 +2022-05-27 20:04:52,054 INFO [train.py:823] (3/4) Epoch 29, batch 900, loss[loss=0.2318, simple_loss=0.3072, pruned_loss=0.07822, over 7158.00 frames.], tot_loss[loss=0.1686, simple_loss=0.2577, pruned_loss=0.03973, over 1396382.18 frames.], batch size: 22, lr: 5.81e-04 +2022-05-27 20:05:30,850 INFO [train.py:823] (3/4) Epoch 29, batch 950, loss[loss=0.2335, simple_loss=0.3176, pruned_loss=0.07464, over 5100.00 frames.], tot_loss[loss=0.1682, simple_loss=0.2569, pruned_loss=0.03974, over 1391855.04 frames.], batch size: 47, lr: 5.80e-04 +2022-05-27 20:05:46,312 INFO [train.py:823] (3/4) Epoch 30, batch 0, loss[loss=0.1778, simple_loss=0.271, pruned_loss=0.04231, over 7380.00 frames.], tot_loss[loss=0.1778, simple_loss=0.271, pruned_loss=0.04231, over 7380.00 frames.], batch size: 20, lr: 5.71e-04 +2022-05-27 20:06:25,476 INFO [train.py:823] (3/4) Epoch 30, batch 50, loss[loss=0.1462, simple_loss=0.2338, pruned_loss=0.02925, over 7099.00 frames.], tot_loss[loss=0.1647, simple_loss=0.2533, pruned_loss=0.03809, over 314527.25 frames.], batch size: 19, lr: 5.70e-04 +2022-05-27 20:07:04,843 INFO [train.py:823] (3/4) Epoch 30, batch 100, loss[loss=0.1435, simple_loss=0.2258, pruned_loss=0.03062, over 7301.00 frames.], tot_loss[loss=0.164, simple_loss=0.2531, pruned_loss=0.03748, over 561111.50 frames.], batch size: 17, lr: 5.70e-04 +2022-05-27 20:07:43,703 INFO [train.py:823] (3/4) Epoch 30, batch 150, loss[loss=0.1618, simple_loss=0.2532, pruned_loss=0.03523, over 7149.00 frames.], tot_loss[loss=0.1658, simple_loss=0.2552, pruned_loss=0.0382, over 752911.65 frames.], batch size: 23, lr: 5.69e-04 +2022-05-27 20:08:23,084 INFO [train.py:823] (3/4) Epoch 30, batch 200, loss[loss=0.1779, simple_loss=0.2864, pruned_loss=0.03469, over 7157.00 frames.], tot_loss[loss=0.1675, simple_loss=0.2567, pruned_loss=0.03916, over 900172.73 frames.], batch size: 23, lr: 5.69e-04 +2022-05-27 20:09:02,259 INFO [train.py:823] (3/4) Epoch 30, batch 250, loss[loss=0.1424, simple_loss=0.234, pruned_loss=0.02539, over 7098.00 frames.], tot_loss[loss=0.1674, simple_loss=0.2568, pruned_loss=0.03899, over 1012332.01 frames.], batch size: 
19, lr: 5.68e-04 +2022-05-27 20:09:41,460 INFO [train.py:823] (3/4) Epoch 30, batch 300, loss[loss=0.1323, simple_loss=0.218, pruned_loss=0.02325, over 7149.00 frames.], tot_loss[loss=0.1689, simple_loss=0.2583, pruned_loss=0.03976, over 1105660.57 frames.], batch size: 17, lr: 5.68e-04 +2022-05-27 20:10:20,418 INFO [train.py:823] (3/4) Epoch 30, batch 350, loss[loss=0.1848, simple_loss=0.2723, pruned_loss=0.04869, over 7235.00 frames.], tot_loss[loss=0.1686, simple_loss=0.2574, pruned_loss=0.03986, over 1175753.51 frames.], batch size: 24, lr: 5.67e-04 +2022-05-27 20:10:59,268 INFO [train.py:823] (3/4) Epoch 30, batch 400, loss[loss=0.1741, simple_loss=0.28, pruned_loss=0.03407, over 7037.00 frames.], tot_loss[loss=0.1672, simple_loss=0.2562, pruned_loss=0.03914, over 1230347.90 frames.], batch size: 26, lr: 5.67e-04 +2022-05-27 20:11:38,372 INFO [train.py:823] (3/4) Epoch 30, batch 450, loss[loss=0.161, simple_loss=0.2546, pruned_loss=0.0337, over 6923.00 frames.], tot_loss[loss=0.1671, simple_loss=0.2558, pruned_loss=0.03917, over 1269023.08 frames.], batch size: 29, lr: 5.66e-04 +2022-05-27 20:12:17,490 INFO [train.py:823] (3/4) Epoch 30, batch 500, loss[loss=0.1416, simple_loss=0.2317, pruned_loss=0.02573, over 7087.00 frames.], tot_loss[loss=0.1661, simple_loss=0.2555, pruned_loss=0.03837, over 1301861.66 frames.], batch size: 19, lr: 5.66e-04 +2022-05-27 20:12:56,810 INFO [train.py:823] (3/4) Epoch 30, batch 550, loss[loss=0.1717, simple_loss=0.2672, pruned_loss=0.03809, over 7408.00 frames.], tot_loss[loss=0.1668, simple_loss=0.2561, pruned_loss=0.03876, over 1326328.95 frames.], batch size: 22, lr: 5.65e-04 +2022-05-27 20:13:37,265 INFO [train.py:823] (3/4) Epoch 30, batch 600, loss[loss=0.1387, simple_loss=0.2309, pruned_loss=0.02329, over 7205.00 frames.], tot_loss[loss=0.1666, simple_loss=0.2556, pruned_loss=0.03876, over 1344019.78 frames.], batch size: 19, lr: 5.65e-04 +2022-05-27 20:14:16,361 INFO [train.py:823] (3/4) Epoch 30, batch 650, loss[loss=0.1458, simple_loss=0.2387, pruned_loss=0.02651, over 7426.00 frames.], tot_loss[loss=0.1664, simple_loss=0.2552, pruned_loss=0.03881, over 1357221.16 frames.], batch size: 22, lr: 5.64e-04 +2022-05-27 20:14:55,812 INFO [train.py:823] (3/4) Epoch 30, batch 700, loss[loss=0.1439, simple_loss=0.2297, pruned_loss=0.02902, over 7298.00 frames.], tot_loss[loss=0.1656, simple_loss=0.2539, pruned_loss=0.03864, over 1376137.44 frames.], batch size: 19, lr: 5.64e-04 +2022-05-27 20:15:34,826 INFO [train.py:823] (3/4) Epoch 30, batch 750, loss[loss=0.1486, simple_loss=0.2323, pruned_loss=0.03248, over 7097.00 frames.], tot_loss[loss=0.1657, simple_loss=0.254, pruned_loss=0.03864, over 1383165.88 frames.], batch size: 18, lr: 5.63e-04 +2022-05-27 20:16:13,840 INFO [train.py:823] (3/4) Epoch 30, batch 800, loss[loss=0.1624, simple_loss=0.2597, pruned_loss=0.03252, over 6986.00 frames.], tot_loss[loss=0.1658, simple_loss=0.2545, pruned_loss=0.03861, over 1393043.93 frames.], batch size: 26, lr: 5.63e-04 +2022-05-27 20:16:52,937 INFO [train.py:823] (3/4) Epoch 30, batch 850, loss[loss=0.1398, simple_loss=0.2239, pruned_loss=0.02785, over 7197.00 frames.], tot_loss[loss=0.1659, simple_loss=0.2545, pruned_loss=0.03864, over 1392240.16 frames.], batch size: 18, lr: 5.62e-04 +2022-05-27 20:17:32,102 INFO [train.py:823] (3/4) Epoch 30, batch 900, loss[loss=0.1685, simple_loss=0.2617, pruned_loss=0.03767, over 7288.00 frames.], tot_loss[loss=0.1654, simple_loss=0.2545, pruned_loss=0.03812, over 1396429.90 frames.], batch size: 19, lr: 5.62e-04 
+2022-05-27 20:18:24,272 INFO [train.py:823] (3/4) Epoch 31, batch 0, loss[loss=0.1665, simple_loss=0.2581, pruned_loss=0.03742, over 7373.00 frames.], tot_loss[loss=0.1665, simple_loss=0.2581, pruned_loss=0.03742, over 7373.00 frames.], batch size: 20, lr: 5.52e-04 +2022-05-27 20:19:03,900 INFO [train.py:823] (3/4) Epoch 31, batch 50, loss[loss=0.1436, simple_loss=0.2226, pruned_loss=0.03228, over 7191.00 frames.], tot_loss[loss=0.1619, simple_loss=0.2502, pruned_loss=0.03684, over 324793.46 frames.], batch size: 18, lr: 5.52e-04 +2022-05-27 20:19:44,326 INFO [train.py:823] (3/4) Epoch 31, batch 100, loss[loss=0.154, simple_loss=0.2343, pruned_loss=0.03687, over 7270.00 frames.], tot_loss[loss=0.1629, simple_loss=0.252, pruned_loss=0.03683, over 565427.25 frames.], batch size: 16, lr: 5.51e-04 +2022-05-27 20:20:23,616 INFO [train.py:823] (3/4) Epoch 31, batch 150, loss[loss=0.1768, simple_loss=0.2706, pruned_loss=0.04152, over 7171.00 frames.], tot_loss[loss=0.1645, simple_loss=0.2537, pruned_loss=0.0376, over 754198.16 frames.], batch size: 25, lr: 5.51e-04 +2022-05-27 20:21:02,315 INFO [train.py:823] (3/4) Epoch 31, batch 200, loss[loss=0.1675, simple_loss=0.2651, pruned_loss=0.03497, over 7098.00 frames.], tot_loss[loss=0.165, simple_loss=0.2544, pruned_loss=0.03781, over 898730.86 frames.], batch size: 18, lr: 5.50e-04 +2022-05-27 20:21:41,477 INFO [train.py:823] (3/4) Epoch 31, batch 250, loss[loss=0.1372, simple_loss=0.2192, pruned_loss=0.02761, over 7145.00 frames.], tot_loss[loss=0.1647, simple_loss=0.2535, pruned_loss=0.03794, over 1006096.90 frames.], batch size: 17, lr: 5.50e-04 +2022-05-27 20:22:21,796 INFO [train.py:823] (3/4) Epoch 31, batch 300, loss[loss=0.1896, simple_loss=0.2784, pruned_loss=0.05036, over 7311.00 frames.], tot_loss[loss=0.1661, simple_loss=0.2546, pruned_loss=0.03881, over 1097927.34 frames.], batch size: 22, lr: 5.49e-04 +2022-05-27 20:23:00,974 INFO [train.py:823] (3/4) Epoch 31, batch 350, loss[loss=0.1434, simple_loss=0.2301, pruned_loss=0.02833, over 7165.00 frames.], tot_loss[loss=0.1656, simple_loss=0.2542, pruned_loss=0.03853, over 1164543.82 frames.], batch size: 17, lr: 5.49e-04 +2022-05-27 20:23:41,363 INFO [train.py:823] (3/4) Epoch 31, batch 400, loss[loss=0.1713, simple_loss=0.2492, pruned_loss=0.04669, over 7376.00 frames.], tot_loss[loss=0.1651, simple_loss=0.2541, pruned_loss=0.03808, over 1225947.05 frames.], batch size: 19, lr: 5.49e-04 +2022-05-27 20:24:20,659 INFO [train.py:823] (3/4) Epoch 31, batch 450, loss[loss=0.153, simple_loss=0.2338, pruned_loss=0.03611, over 7302.00 frames.], tot_loss[loss=0.1649, simple_loss=0.2541, pruned_loss=0.03783, over 1269691.24 frames.], batch size: 18, lr: 5.48e-04 +2022-05-27 20:24:59,878 INFO [train.py:823] (3/4) Epoch 31, batch 500, loss[loss=0.1494, simple_loss=0.2462, pruned_loss=0.02633, over 7099.00 frames.], tot_loss[loss=0.1643, simple_loss=0.2535, pruned_loss=0.03752, over 1302350.20 frames.], batch size: 18, lr: 5.48e-04 +2022-05-27 20:25:39,405 INFO [train.py:823] (3/4) Epoch 31, batch 550, loss[loss=0.1492, simple_loss=0.243, pruned_loss=0.02766, over 7393.00 frames.], tot_loss[loss=0.1641, simple_loss=0.2532, pruned_loss=0.03752, over 1326122.70 frames.], batch size: 19, lr: 5.47e-04 +2022-05-27 20:26:18,552 INFO [train.py:823] (3/4) Epoch 31, batch 600, loss[loss=0.1478, simple_loss=0.2263, pruned_loss=0.03468, over 7284.00 frames.], tot_loss[loss=0.1648, simple_loss=0.254, pruned_loss=0.03782, over 1347173.68 frames.], batch size: 16, lr: 5.47e-04 +2022-05-27 20:26:57,645 INFO 
[train.py:823] (3/4) Epoch 31, batch 650, loss[loss=0.1972, simple_loss=0.284, pruned_loss=0.05523, over 7143.00 frames.], tot_loss[loss=0.1644, simple_loss=0.2537, pruned_loss=0.03757, over 1362392.90 frames.], batch size: 22, lr: 5.46e-04 +2022-05-27 20:27:36,455 INFO [train.py:823] (3/4) Epoch 31, batch 700, loss[loss=0.1753, simple_loss=0.2512, pruned_loss=0.0497, over 7301.00 frames.], tot_loss[loss=0.1654, simple_loss=0.2546, pruned_loss=0.03811, over 1370214.07 frames.], batch size: 17, lr: 5.46e-04 +2022-05-27 20:28:15,536 INFO [train.py:823] (3/4) Epoch 31, batch 750, loss[loss=0.1689, simple_loss=0.2409, pruned_loss=0.04841, over 7305.00 frames.], tot_loss[loss=0.1654, simple_loss=0.2545, pruned_loss=0.03811, over 1381850.78 frames.], batch size: 18, lr: 5.45e-04 +2022-05-27 20:28:54,340 INFO [train.py:823] (3/4) Epoch 31, batch 800, loss[loss=0.1677, simple_loss=0.2505, pruned_loss=0.04244, over 6780.00 frames.], tot_loss[loss=0.1644, simple_loss=0.2536, pruned_loss=0.03761, over 1391565.64 frames.], batch size: 15, lr: 5.45e-04 +2022-05-27 20:29:32,816 INFO [train.py:823] (3/4) Epoch 31, batch 850, loss[loss=0.1558, simple_loss=0.2553, pruned_loss=0.02808, over 7032.00 frames.], tot_loss[loss=0.165, simple_loss=0.2542, pruned_loss=0.03793, over 1390958.06 frames.], batch size: 26, lr: 5.44e-04 +2022-05-27 20:30:11,803 INFO [train.py:823] (3/4) Epoch 31, batch 900, loss[loss=0.1634, simple_loss=0.2455, pruned_loss=0.04064, over 7102.00 frames.], tot_loss[loss=0.1652, simple_loss=0.2546, pruned_loss=0.03792, over 1396763.30 frames.], batch size: 19, lr: 5.44e-04 +2022-05-27 20:31:03,427 INFO [train.py:823] (3/4) Epoch 32, batch 0, loss[loss=0.1781, simple_loss=0.2563, pruned_loss=0.04994, over 5096.00 frames.], tot_loss[loss=0.1781, simple_loss=0.2563, pruned_loss=0.04994, over 5096.00 frames.], batch size: 47, lr: 5.35e-04 +2022-05-27 20:31:42,718 INFO [train.py:823] (3/4) Epoch 32, batch 50, loss[loss=0.1469, simple_loss=0.234, pruned_loss=0.02985, over 7291.00 frames.], tot_loss[loss=0.1662, simple_loss=0.255, pruned_loss=0.03865, over 320168.68 frames.], batch size: 17, lr: 5.35e-04 +2022-05-27 20:32:21,585 INFO [train.py:823] (3/4) Epoch 32, batch 100, loss[loss=0.2134, simple_loss=0.2988, pruned_loss=0.06396, over 7165.00 frames.], tot_loss[loss=0.166, simple_loss=0.2556, pruned_loss=0.03821, over 565839.58 frames.], batch size: 22, lr: 5.34e-04 +2022-05-27 20:33:00,071 INFO [train.py:823] (3/4) Epoch 32, batch 150, loss[loss=0.1535, simple_loss=0.241, pruned_loss=0.033, over 7193.00 frames.], tot_loss[loss=0.1667, simple_loss=0.2554, pruned_loss=0.03902, over 758707.17 frames.], batch size: 19, lr: 5.34e-04 +2022-05-27 20:33:39,234 INFO [train.py:823] (3/4) Epoch 32, batch 200, loss[loss=0.1677, simple_loss=0.2534, pruned_loss=0.04102, over 7186.00 frames.], tot_loss[loss=0.1666, simple_loss=0.2554, pruned_loss=0.03889, over 904842.65 frames.], batch size: 19, lr: 5.33e-04 +2022-05-27 20:34:18,206 INFO [train.py:823] (3/4) Epoch 32, batch 250, loss[loss=0.1907, simple_loss=0.2815, pruned_loss=0.04992, over 7193.00 frames.], tot_loss[loss=0.1655, simple_loss=0.2543, pruned_loss=0.0383, over 1021132.98 frames.], batch size: 19, lr: 5.33e-04 +2022-05-27 20:34:57,825 INFO [train.py:823] (3/4) Epoch 32, batch 300, loss[loss=0.1688, simple_loss=0.2611, pruned_loss=0.03826, over 7288.00 frames.], tot_loss[loss=0.1642, simple_loss=0.2533, pruned_loss=0.03758, over 1106561.52 frames.], batch size: 19, lr: 5.32e-04 +2022-05-27 20:35:36,836 INFO [train.py:823] (3/4) Epoch 32, batch 
350, loss[loss=0.178, simple_loss=0.2513, pruned_loss=0.05232, over 7422.00 frames.], tot_loss[loss=0.1654, simple_loss=0.2544, pruned_loss=0.03823, over 1177208.82 frames.], batch size: 17, lr: 5.32e-04 +2022-05-27 20:36:16,026 INFO [train.py:823] (3/4) Epoch 32, batch 400, loss[loss=0.1516, simple_loss=0.2482, pruned_loss=0.02748, over 6662.00 frames.], tot_loss[loss=0.1658, simple_loss=0.2552, pruned_loss=0.03822, over 1227078.57 frames.], batch size: 34, lr: 5.32e-04 +2022-05-27 20:36:54,906 INFO [train.py:823] (3/4) Epoch 32, batch 450, loss[loss=0.1762, simple_loss=0.2674, pruned_loss=0.04254, over 7155.00 frames.], tot_loss[loss=0.1653, simple_loss=0.2546, pruned_loss=0.03799, over 1267454.59 frames.], batch size: 23, lr: 5.31e-04 +2022-05-27 20:37:35,423 INFO [train.py:823] (3/4) Epoch 32, batch 500, loss[loss=0.152, simple_loss=0.253, pruned_loss=0.0255, over 7197.00 frames.], tot_loss[loss=0.164, simple_loss=0.2532, pruned_loss=0.03736, over 1301559.26 frames.], batch size: 20, lr: 5.31e-04 +2022-05-27 20:38:14,349 INFO [train.py:823] (3/4) Epoch 32, batch 550, loss[loss=0.1876, simple_loss=0.2703, pruned_loss=0.05244, over 7221.00 frames.], tot_loss[loss=0.1639, simple_loss=0.2535, pruned_loss=0.03717, over 1329602.26 frames.], batch size: 25, lr: 5.30e-04 +2022-05-27 20:38:53,783 INFO [train.py:823] (3/4) Epoch 32, batch 600, loss[loss=0.1491, simple_loss=0.2337, pruned_loss=0.03227, over 7277.00 frames.], tot_loss[loss=0.1643, simple_loss=0.2538, pruned_loss=0.03736, over 1350392.23 frames.], batch size: 17, lr: 5.30e-04 +2022-05-27 20:39:32,671 INFO [train.py:823] (3/4) Epoch 32, batch 650, loss[loss=0.161, simple_loss=0.2574, pruned_loss=0.03234, over 7033.00 frames.], tot_loss[loss=0.1644, simple_loss=0.2537, pruned_loss=0.03755, over 1362886.25 frames.], batch size: 26, lr: 5.29e-04 +2022-05-27 20:40:11,860 INFO [train.py:823] (3/4) Epoch 32, batch 700, loss[loss=0.1719, simple_loss=0.2655, pruned_loss=0.0392, over 7106.00 frames.], tot_loss[loss=0.1642, simple_loss=0.2532, pruned_loss=0.0376, over 1379065.77 frames.], batch size: 20, lr: 5.29e-04 +2022-05-27 20:40:50,485 INFO [train.py:823] (3/4) Epoch 32, batch 750, loss[loss=0.1311, simple_loss=0.2274, pruned_loss=0.01744, over 7396.00 frames.], tot_loss[loss=0.1636, simple_loss=0.2528, pruned_loss=0.03723, over 1389247.21 frames.], batch size: 19, lr: 5.29e-04 +2022-05-27 20:41:30,254 INFO [train.py:823] (3/4) Epoch 32, batch 800, loss[loss=0.14, simple_loss=0.2159, pruned_loss=0.03205, over 7147.00 frames.], tot_loss[loss=0.1644, simple_loss=0.2533, pruned_loss=0.03772, over 1397411.59 frames.], batch size: 17, lr: 5.28e-04 +2022-05-27 20:42:10,641 INFO [train.py:823] (3/4) Epoch 32, batch 850, loss[loss=0.1388, simple_loss=0.2283, pruned_loss=0.02469, over 7020.00 frames.], tot_loss[loss=0.1636, simple_loss=0.2526, pruned_loss=0.03726, over 1400615.84 frames.], batch size: 17, lr: 5.28e-04 +2022-05-27 20:42:49,962 INFO [train.py:823] (3/4) Epoch 32, batch 900, loss[loss=0.1414, simple_loss=0.2283, pruned_loss=0.02722, over 7030.00 frames.], tot_loss[loss=0.1639, simple_loss=0.2533, pruned_loss=0.03723, over 1406418.16 frames.], batch size: 17, lr: 5.27e-04 +2022-05-27 20:43:44,010 INFO [train.py:823] (3/4) Epoch 33, batch 0, loss[loss=0.163, simple_loss=0.2473, pruned_loss=0.03934, over 6842.00 frames.], tot_loss[loss=0.163, simple_loss=0.2473, pruned_loss=0.03934, over 6842.00 frames.], batch size: 29, lr: 5.19e-04 +2022-05-27 20:44:22,792 INFO [train.py:823] (3/4) Epoch 33, batch 50, loss[loss=0.134, 
simple_loss=0.2185, pruned_loss=0.02475, over 7158.00 frames.], tot_loss[loss=0.1626, simple_loss=0.2518, pruned_loss=0.03671, over 317003.75 frames.], batch size: 17, lr: 5.18e-04 +2022-05-27 20:45:02,636 INFO [train.py:823] (3/4) Epoch 33, batch 100, loss[loss=0.1419, simple_loss=0.2217, pruned_loss=0.03098, over 6814.00 frames.], tot_loss[loss=0.1621, simple_loss=0.2517, pruned_loss=0.03629, over 560891.57 frames.], batch size: 15, lr: 5.18e-04 +2022-05-27 20:45:41,738 INFO [train.py:823] (3/4) Epoch 33, batch 150, loss[loss=0.1713, simple_loss=0.2617, pruned_loss=0.04043, over 7184.00 frames.], tot_loss[loss=0.1642, simple_loss=0.2544, pruned_loss=0.03701, over 750112.44 frames.], batch size: 21, lr: 5.18e-04 +2022-05-27 20:46:21,896 INFO [train.py:823] (3/4) Epoch 33, batch 200, loss[loss=0.1902, simple_loss=0.2826, pruned_loss=0.04888, over 7112.00 frames.], tot_loss[loss=0.1641, simple_loss=0.2536, pruned_loss=0.03731, over 894321.08 frames.], batch size: 20, lr: 5.17e-04 +2022-05-27 20:47:00,820 INFO [train.py:823] (3/4) Epoch 33, batch 250, loss[loss=0.1819, simple_loss=0.2783, pruned_loss=0.04272, over 7156.00 frames.], tot_loss[loss=0.163, simple_loss=0.2522, pruned_loss=0.03689, over 1014440.73 frames.], batch size: 23, lr: 5.17e-04 +2022-05-27 20:47:39,825 INFO [train.py:823] (3/4) Epoch 33, batch 300, loss[loss=0.1421, simple_loss=0.2177, pruned_loss=0.03321, over 7158.00 frames.], tot_loss[loss=0.163, simple_loss=0.2524, pruned_loss=0.03683, over 1107539.13 frames.], batch size: 17, lr: 5.16e-04 +2022-05-27 20:48:19,060 INFO [train.py:823] (3/4) Epoch 33, batch 350, loss[loss=0.1617, simple_loss=0.2584, pruned_loss=0.03249, over 7328.00 frames.], tot_loss[loss=0.1639, simple_loss=0.2532, pruned_loss=0.03727, over 1176919.42 frames.], batch size: 23, lr: 5.16e-04 +2022-05-27 20:48:57,910 INFO [train.py:823] (3/4) Epoch 33, batch 400, loss[loss=0.1658, simple_loss=0.2653, pruned_loss=0.0331, over 7410.00 frames.], tot_loss[loss=0.164, simple_loss=0.2535, pruned_loss=0.03726, over 1230839.78 frames.], batch size: 22, lr: 5.16e-04 +2022-05-27 20:49:37,019 INFO [train.py:823] (3/4) Epoch 33, batch 450, loss[loss=0.1368, simple_loss=0.2302, pruned_loss=0.02168, over 7294.00 frames.], tot_loss[loss=0.164, simple_loss=0.254, pruned_loss=0.03702, over 1272007.03 frames.], batch size: 19, lr: 5.15e-04 +2022-05-27 20:50:15,636 INFO [train.py:823] (3/4) Epoch 33, batch 500, loss[loss=0.1365, simple_loss=0.2316, pruned_loss=0.02076, over 6842.00 frames.], tot_loss[loss=0.164, simple_loss=0.2538, pruned_loss=0.03705, over 1306168.48 frames.], batch size: 29, lr: 5.15e-04 +2022-05-27 20:50:54,733 INFO [train.py:823] (3/4) Epoch 33, batch 550, loss[loss=0.1666, simple_loss=0.2524, pruned_loss=0.04037, over 7400.00 frames.], tot_loss[loss=0.1637, simple_loss=0.2533, pruned_loss=0.037, over 1334400.97 frames.], batch size: 19, lr: 5.14e-04 +2022-05-27 20:51:33,956 INFO [train.py:823] (3/4) Epoch 33, batch 600, loss[loss=0.1499, simple_loss=0.2476, pruned_loss=0.02613, over 7427.00 frames.], tot_loss[loss=0.1626, simple_loss=0.2519, pruned_loss=0.03661, over 1354023.43 frames.], batch size: 22, lr: 5.14e-04 +2022-05-27 20:52:12,891 INFO [train.py:823] (3/4) Epoch 33, batch 650, loss[loss=0.1554, simple_loss=0.2388, pruned_loss=0.03597, over 7147.00 frames.], tot_loss[loss=0.162, simple_loss=0.2512, pruned_loss=0.03638, over 1372731.08 frames.], batch size: 17, lr: 5.14e-04 +2022-05-27 20:52:51,797 INFO [train.py:823] (3/4) Epoch 33, batch 700, loss[loss=0.1675, simple_loss=0.2602, 
pruned_loss=0.03742, over 6438.00 frames.], tot_loss[loss=0.162, simple_loss=0.2518, pruned_loss=0.03613, over 1384069.37 frames.], batch size: 34, lr: 5.13e-04 +2022-05-27 20:53:30,746 INFO [train.py:823] (3/4) Epoch 33, batch 750, loss[loss=0.1994, simple_loss=0.284, pruned_loss=0.05738, over 7188.00 frames.], tot_loss[loss=0.1633, simple_loss=0.2531, pruned_loss=0.03675, over 1391274.34 frames.], batch size: 25, lr: 5.13e-04 +2022-05-27 20:54:09,388 INFO [train.py:823] (3/4) Epoch 33, batch 800, loss[loss=0.1656, simple_loss=0.2611, pruned_loss=0.03508, over 7179.00 frames.], tot_loss[loss=0.1645, simple_loss=0.2544, pruned_loss=0.03725, over 1391842.47 frames.], batch size: 22, lr: 5.12e-04 +2022-05-27 20:54:48,150 INFO [train.py:823] (3/4) Epoch 33, batch 850, loss[loss=0.1794, simple_loss=0.2592, pruned_loss=0.04975, over 7094.00 frames.], tot_loss[loss=0.1636, simple_loss=0.2532, pruned_loss=0.03701, over 1400399.25 frames.], batch size: 18, lr: 5.12e-04 +2022-05-27 20:55:26,938 INFO [train.py:823] (3/4) Epoch 33, batch 900, loss[loss=0.149, simple_loss=0.2264, pruned_loss=0.03577, over 7021.00 frames.], tot_loss[loss=0.1626, simple_loss=0.2522, pruned_loss=0.03653, over 1402069.81 frames.], batch size: 16, lr: 5.12e-04 +2022-05-27 20:56:18,086 INFO [train.py:823] (3/4) Epoch 34, batch 0, loss[loss=0.1767, simple_loss=0.2684, pruned_loss=0.0425, over 7206.00 frames.], tot_loss[loss=0.1767, simple_loss=0.2684, pruned_loss=0.0425, over 7206.00 frames.], batch size: 24, lr: 5.04e-04 +2022-05-27 20:56:56,730 INFO [train.py:823] (3/4) Epoch 34, batch 50, loss[loss=0.1535, simple_loss=0.2409, pruned_loss=0.03304, over 6788.00 frames.], tot_loss[loss=0.16, simple_loss=0.2493, pruned_loss=0.03539, over 320062.37 frames.], batch size: 15, lr: 5.03e-04 +2022-05-27 20:57:36,585 INFO [train.py:823] (3/4) Epoch 34, batch 100, loss[loss=0.1531, simple_loss=0.2568, pruned_loss=0.02466, over 7285.00 frames.], tot_loss[loss=0.1631, simple_loss=0.2522, pruned_loss=0.03698, over 561009.64 frames.], batch size: 21, lr: 5.03e-04 +2022-05-27 20:58:15,784 INFO [train.py:823] (3/4) Epoch 34, batch 150, loss[loss=0.1772, simple_loss=0.2721, pruned_loss=0.04111, over 7308.00 frames.], tot_loss[loss=0.1633, simple_loss=0.2532, pruned_loss=0.03675, over 754516.39 frames.], batch size: 22, lr: 5.02e-04 +2022-05-27 20:58:54,910 INFO [train.py:823] (3/4) Epoch 34, batch 200, loss[loss=0.1544, simple_loss=0.2477, pruned_loss=0.03058, over 7005.00 frames.], tot_loss[loss=0.163, simple_loss=0.2527, pruned_loss=0.03665, over 902434.22 frames.], batch size: 26, lr: 5.02e-04 +2022-05-27 20:59:34,293 INFO [train.py:823] (3/4) Epoch 34, batch 250, loss[loss=0.1734, simple_loss=0.2619, pruned_loss=0.04245, over 7032.00 frames.], tot_loss[loss=0.1633, simple_loss=0.2524, pruned_loss=0.03705, over 1013202.21 frames.], batch size: 26, lr: 5.02e-04 +2022-05-27 21:00:13,166 INFO [train.py:823] (3/4) Epoch 34, batch 300, loss[loss=0.16, simple_loss=0.2524, pruned_loss=0.03381, over 7375.00 frames.], tot_loss[loss=0.1626, simple_loss=0.2518, pruned_loss=0.0367, over 1103721.11 frames.], batch size: 21, lr: 5.01e-04 +2022-05-27 21:00:53,364 INFO [train.py:823] (3/4) Epoch 34, batch 350, loss[loss=0.1515, simple_loss=0.2471, pruned_loss=0.0279, over 7100.00 frames.], tot_loss[loss=0.1626, simple_loss=0.2519, pruned_loss=0.03662, over 1169976.21 frames.], batch size: 19, lr: 5.01e-04 +2022-05-27 21:01:32,641 INFO [train.py:823] (3/4) Epoch 34, batch 400, loss[loss=0.152, simple_loss=0.2457, pruned_loss=0.02914, over 7275.00 
frames.], tot_loss[loss=0.1622, simple_loss=0.2517, pruned_loss=0.03635, over 1224368.22 frames.], batch size: 21, lr: 5.00e-04 +2022-05-27 21:02:11,980 INFO [train.py:823] (3/4) Epoch 34, batch 450, loss[loss=0.2, simple_loss=0.2874, pruned_loss=0.0563, over 7275.00 frames.], tot_loss[loss=0.1628, simple_loss=0.2522, pruned_loss=0.03667, over 1270064.96 frames.], batch size: 20, lr: 5.00e-04 +2022-05-27 21:02:51,518 INFO [train.py:823] (3/4) Epoch 34, batch 500, loss[loss=0.158, simple_loss=0.2505, pruned_loss=0.03278, over 7163.00 frames.], tot_loss[loss=0.1621, simple_loss=0.2515, pruned_loss=0.03641, over 1303480.65 frames.], batch size: 23, lr: 5.00e-04 +2022-05-27 21:03:31,216 INFO [train.py:823] (3/4) Epoch 34, batch 550, loss[loss=0.159, simple_loss=0.2488, pruned_loss=0.03457, over 7184.00 frames.], tot_loss[loss=0.1621, simple_loss=0.2515, pruned_loss=0.03638, over 1335243.82 frames.], batch size: 25, lr: 4.99e-04 +2022-05-27 21:04:10,491 INFO [train.py:823] (3/4) Epoch 34, batch 600, loss[loss=0.1579, simple_loss=0.234, pruned_loss=0.04091, over 7301.00 frames.], tot_loss[loss=0.1622, simple_loss=0.2517, pruned_loss=0.03634, over 1352678.36 frames.], batch size: 17, lr: 4.99e-04 +2022-05-27 21:04:51,456 INFO [train.py:823] (3/4) Epoch 34, batch 650, loss[loss=0.1415, simple_loss=0.2457, pruned_loss=0.01861, over 6939.00 frames.], tot_loss[loss=0.1611, simple_loss=0.2505, pruned_loss=0.03589, over 1367397.89 frames.], batch size: 29, lr: 4.99e-04 +2022-05-27 21:05:35,677 INFO [train.py:823] (3/4) Epoch 34, batch 700, loss[loss=0.1497, simple_loss=0.2381, pruned_loss=0.03066, over 7374.00 frames.], tot_loss[loss=0.1616, simple_loss=0.251, pruned_loss=0.03609, over 1377208.98 frames.], batch size: 20, lr: 4.98e-04 +2022-05-27 21:06:14,479 INFO [train.py:823] (3/4) Epoch 34, batch 750, loss[loss=0.1491, simple_loss=0.2354, pruned_loss=0.0314, over 6995.00 frames.], tot_loss[loss=0.1626, simple_loss=0.2518, pruned_loss=0.03676, over 1388855.59 frames.], batch size: 16, lr: 4.98e-04 +2022-05-27 21:06:53,531 INFO [train.py:823] (3/4) Epoch 34, batch 800, loss[loss=0.1521, simple_loss=0.2453, pruned_loss=0.02941, over 7199.00 frames.], tot_loss[loss=0.1633, simple_loss=0.2522, pruned_loss=0.03723, over 1396554.10 frames.], batch size: 19, lr: 4.97e-04 +2022-05-27 21:07:32,186 INFO [train.py:823] (3/4) Epoch 34, batch 850, loss[loss=0.1877, simple_loss=0.2748, pruned_loss=0.05027, over 7374.00 frames.], tot_loss[loss=0.1634, simple_loss=0.2528, pruned_loss=0.03703, over 1396406.19 frames.], batch size: 21, lr: 4.97e-04 +2022-05-27 21:08:12,989 INFO [train.py:823] (3/4) Epoch 34, batch 900, loss[loss=0.1283, simple_loss=0.2137, pruned_loss=0.02147, over 7091.00 frames.], tot_loss[loss=0.1628, simple_loss=0.2522, pruned_loss=0.03673, over 1400626.42 frames.], batch size: 18, lr: 4.97e-04 +2022-05-27 21:09:07,193 INFO [train.py:823] (3/4) Epoch 35, batch 0, loss[loss=0.1845, simple_loss=0.2744, pruned_loss=0.04727, over 7180.00 frames.], tot_loss[loss=0.1845, simple_loss=0.2744, pruned_loss=0.04727, over 7180.00 frames.], batch size: 21, lr: 4.89e-04 +2022-05-27 21:09:48,012 INFO [train.py:823] (3/4) Epoch 35, batch 50, loss[loss=0.1381, simple_loss=0.2232, pruned_loss=0.02648, over 7182.00 frames.], tot_loss[loss=0.1635, simple_loss=0.2541, pruned_loss=0.03646, over 323302.94 frames.], batch size: 18, lr: 4.89e-04 +2022-05-27 21:10:26,980 INFO [train.py:823] (3/4) Epoch 35, batch 100, loss[loss=0.1421, simple_loss=0.237, pruned_loss=0.02361, over 6328.00 frames.], tot_loss[loss=0.1633, 
simple_loss=0.253, pruned_loss=0.0368, over 567933.91 frames.], batch size: 34, lr: 4.88e-04 +2022-05-27 21:11:06,211 INFO [train.py:823] (3/4) Epoch 35, batch 150, loss[loss=0.1777, simple_loss=0.264, pruned_loss=0.0457, over 7198.00 frames.], tot_loss[loss=0.1625, simple_loss=0.2523, pruned_loss=0.03638, over 753093.92 frames.], batch size: 25, lr: 4.88e-04 +2022-05-27 21:11:44,985 INFO [train.py:823] (3/4) Epoch 35, batch 200, loss[loss=0.1543, simple_loss=0.2554, pruned_loss=0.02665, over 6956.00 frames.], tot_loss[loss=0.1614, simple_loss=0.2514, pruned_loss=0.03566, over 902759.65 frames.], batch size: 29, lr: 4.88e-04 +2022-05-27 21:12:24,221 INFO [train.py:823] (3/4) Epoch 35, batch 250, loss[loss=0.182, simple_loss=0.2708, pruned_loss=0.04661, over 7254.00 frames.], tot_loss[loss=0.1603, simple_loss=0.2506, pruned_loss=0.035, over 1013149.23 frames.], batch size: 24, lr: 4.87e-04 +2022-05-27 21:13:03,229 INFO [train.py:823] (3/4) Epoch 35, batch 300, loss[loss=0.1648, simple_loss=0.2527, pruned_loss=0.0384, over 7284.00 frames.], tot_loss[loss=0.1612, simple_loss=0.2515, pruned_loss=0.03542, over 1105209.82 frames.], batch size: 21, lr: 4.87e-04 +2022-05-27 21:13:42,446 INFO [train.py:823] (3/4) Epoch 35, batch 350, loss[loss=0.1448, simple_loss=0.242, pruned_loss=0.02384, over 7087.00 frames.], tot_loss[loss=0.1617, simple_loss=0.2521, pruned_loss=0.03564, over 1170410.36 frames.], batch size: 18, lr: 4.87e-04 +2022-05-27 21:14:21,164 INFO [train.py:823] (3/4) Epoch 35, batch 400, loss[loss=0.167, simple_loss=0.2566, pruned_loss=0.03872, over 7169.00 frames.], tot_loss[loss=0.1621, simple_loss=0.2521, pruned_loss=0.03606, over 1222130.86 frames.], batch size: 22, lr: 4.86e-04 +2022-05-27 21:15:00,076 INFO [train.py:823] (3/4) Epoch 35, batch 450, loss[loss=0.1544, simple_loss=0.2313, pruned_loss=0.03874, over 7294.00 frames.], tot_loss[loss=0.1612, simple_loss=0.2512, pruned_loss=0.03559, over 1269727.05 frames.], batch size: 17, lr: 4.86e-04 +2022-05-27 21:15:38,810 INFO [train.py:823] (3/4) Epoch 35, batch 500, loss[loss=0.1518, simple_loss=0.2374, pruned_loss=0.03306, over 7432.00 frames.], tot_loss[loss=0.1609, simple_loss=0.251, pruned_loss=0.03537, over 1304706.81 frames.], batch size: 18, lr: 4.86e-04 +2022-05-27 21:16:17,746 INFO [train.py:823] (3/4) Epoch 35, batch 550, loss[loss=0.1421, simple_loss=0.2248, pruned_loss=0.0297, over 7026.00 frames.], tot_loss[loss=0.1611, simple_loss=0.2507, pruned_loss=0.03577, over 1328127.99 frames.], batch size: 17, lr: 4.85e-04 +2022-05-27 21:16:57,130 INFO [train.py:823] (3/4) Epoch 35, batch 600, loss[loss=0.1714, simple_loss=0.2666, pruned_loss=0.03816, over 7278.00 frames.], tot_loss[loss=0.1615, simple_loss=0.2509, pruned_loss=0.03607, over 1349375.55 frames.], batch size: 20, lr: 4.85e-04 +2022-05-27 21:17:36,335 INFO [train.py:823] (3/4) Epoch 35, batch 650, loss[loss=0.1493, simple_loss=0.2544, pruned_loss=0.02207, over 7066.00 frames.], tot_loss[loss=0.1612, simple_loss=0.2505, pruned_loss=0.03592, over 1367975.95 frames.], batch size: 26, lr: 4.84e-04 +2022-05-27 21:18:15,642 INFO [train.py:823] (3/4) Epoch 35, batch 700, loss[loss=0.1565, simple_loss=0.2588, pruned_loss=0.02711, over 7274.00 frames.], tot_loss[loss=0.1609, simple_loss=0.2502, pruned_loss=0.03581, over 1377360.00 frames.], batch size: 20, lr: 4.84e-04 +2022-05-27 21:18:54,811 INFO [train.py:823] (3/4) Epoch 35, batch 750, loss[loss=0.1389, simple_loss=0.2326, pruned_loss=0.02263, over 7087.00 frames.], tot_loss[loss=0.1619, simple_loss=0.2512, 
pruned_loss=0.03635, over 1390322.26 frames.], batch size: 19, lr: 4.84e-04 +2022-05-27 21:19:32,976 INFO [train.py:823] (3/4) Epoch 35, batch 800, loss[loss=0.1544, simple_loss=0.2384, pruned_loss=0.03526, over 7310.00 frames.], tot_loss[loss=0.1613, simple_loss=0.2505, pruned_loss=0.03603, over 1394648.90 frames.], batch size: 18, lr: 4.83e-04 +2022-05-27 21:20:12,107 INFO [train.py:823] (3/4) Epoch 35, batch 850, loss[loss=0.1746, simple_loss=0.2713, pruned_loss=0.03896, over 7420.00 frames.], tot_loss[loss=0.1613, simple_loss=0.2505, pruned_loss=0.03608, over 1402925.95 frames.], batch size: 22, lr: 4.83e-04 +2022-05-27 21:20:50,695 INFO [train.py:823] (3/4) Epoch 35, batch 900, loss[loss=0.1589, simple_loss=0.2535, pruned_loss=0.0321, over 6641.00 frames.], tot_loss[loss=0.1611, simple_loss=0.25, pruned_loss=0.03607, over 1400744.95 frames.], batch size: 34, lr: 4.83e-04 +2022-05-27 21:21:29,624 INFO [train.py:823] (3/4) Epoch 35, batch 950, loss[loss=0.1581, simple_loss=0.2492, pruned_loss=0.03347, over 5082.00 frames.], tot_loss[loss=0.1623, simple_loss=0.2515, pruned_loss=0.03654, over 1379662.49 frames.], batch size: 47, lr: 4.82e-04 +2022-05-27 21:21:42,977 INFO [train.py:823] (3/4) Epoch 36, batch 0, loss[loss=0.172, simple_loss=0.2619, pruned_loss=0.04105, over 7409.00 frames.], tot_loss[loss=0.172, simple_loss=0.2619, pruned_loss=0.04105, over 7409.00 frames.], batch size: 22, lr: 4.76e-04 +2022-05-27 21:22:22,384 INFO [train.py:823] (3/4) Epoch 36, batch 50, loss[loss=0.1351, simple_loss=0.2248, pruned_loss=0.02276, over 7145.00 frames.], tot_loss[loss=0.1583, simple_loss=0.248, pruned_loss=0.03432, over 318861.83 frames.], batch size: 17, lr: 4.75e-04 +2022-05-27 21:23:01,912 INFO [train.py:823] (3/4) Epoch 36, batch 100, loss[loss=0.1638, simple_loss=0.2571, pruned_loss=0.03527, over 6634.00 frames.], tot_loss[loss=0.1584, simple_loss=0.2483, pruned_loss=0.03424, over 564560.45 frames.], batch size: 34, lr: 4.75e-04 +2022-05-27 21:23:40,644 INFO [train.py:823] (3/4) Epoch 36, batch 150, loss[loss=0.1482, simple_loss=0.2468, pruned_loss=0.02479, over 7215.00 frames.], tot_loss[loss=0.1604, simple_loss=0.2501, pruned_loss=0.03535, over 751771.14 frames.], batch size: 25, lr: 4.74e-04 +2022-05-27 21:24:21,447 INFO [train.py:823] (3/4) Epoch 36, batch 200, loss[loss=0.1426, simple_loss=0.2195, pruned_loss=0.03285, over 7293.00 frames.], tot_loss[loss=0.1604, simple_loss=0.2493, pruned_loss=0.03575, over 900198.36 frames.], batch size: 17, lr: 4.74e-04 +2022-05-27 21:25:00,031 INFO [train.py:823] (3/4) Epoch 36, batch 250, loss[loss=0.1552, simple_loss=0.2443, pruned_loss=0.03299, over 7395.00 frames.], tot_loss[loss=0.1609, simple_loss=0.2494, pruned_loss=0.03616, over 1013143.97 frames.], batch size: 19, lr: 4.74e-04 +2022-05-27 21:25:39,305 INFO [train.py:823] (3/4) Epoch 36, batch 300, loss[loss=0.1671, simple_loss=0.2623, pruned_loss=0.03593, over 7328.00 frames.], tot_loss[loss=0.1609, simple_loss=0.2497, pruned_loss=0.03599, over 1101459.63 frames.], batch size: 23, lr: 4.73e-04 +2022-05-27 21:26:19,010 INFO [train.py:823] (3/4) Epoch 36, batch 350, loss[loss=0.1646, simple_loss=0.2567, pruned_loss=0.03625, over 7386.00 frames.], tot_loss[loss=0.1615, simple_loss=0.2508, pruned_loss=0.03611, over 1171443.18 frames.], batch size: 20, lr: 4.73e-04 +2022-05-27 21:26:58,356 INFO [train.py:823] (3/4) Epoch 36, batch 400, loss[loss=0.1584, simple_loss=0.252, pruned_loss=0.0324, over 7090.00 frames.], tot_loss[loss=0.1611, simple_loss=0.2508, pruned_loss=0.03568, over 1226565.18 
frames.], batch size: 18, lr: 4.73e-04 +2022-05-27 21:27:39,306 INFO [train.py:823] (3/4) Epoch 36, batch 450, loss[loss=0.147, simple_loss=0.2396, pruned_loss=0.0272, over 7038.00 frames.], tot_loss[loss=0.1611, simple_loss=0.2504, pruned_loss=0.03587, over 1269067.13 frames.], batch size: 26, lr: 4.72e-04 +2022-05-27 21:28:18,470 INFO [train.py:823] (3/4) Epoch 36, batch 500, loss[loss=0.1629, simple_loss=0.2542, pruned_loss=0.03585, over 7234.00 frames.], tot_loss[loss=0.161, simple_loss=0.2501, pruned_loss=0.0359, over 1299460.19 frames.], batch size: 24, lr: 4.72e-04 +2022-05-27 21:28:57,588 INFO [train.py:823] (3/4) Epoch 36, batch 550, loss[loss=0.1551, simple_loss=0.2305, pruned_loss=0.03988, over 7286.00 frames.], tot_loss[loss=0.1607, simple_loss=0.25, pruned_loss=0.03572, over 1326779.79 frames.], batch size: 17, lr: 4.72e-04 +2022-05-27 21:29:37,171 INFO [train.py:823] (3/4) Epoch 36, batch 600, loss[loss=0.1419, simple_loss=0.2242, pruned_loss=0.02977, over 7304.00 frames.], tot_loss[loss=0.1614, simple_loss=0.2505, pruned_loss=0.03616, over 1345371.07 frames.], batch size: 17, lr: 4.71e-04 +2022-05-27 21:30:16,466 INFO [train.py:823] (3/4) Epoch 36, batch 650, loss[loss=0.1559, simple_loss=0.2441, pruned_loss=0.0339, over 7367.00 frames.], tot_loss[loss=0.1625, simple_loss=0.2518, pruned_loss=0.03655, over 1361303.73 frames.], batch size: 21, lr: 4.71e-04 +2022-05-27 21:30:56,852 INFO [train.py:823] (3/4) Epoch 36, batch 700, loss[loss=0.1502, simple_loss=0.2338, pruned_loss=0.03332, over 7294.00 frames.], tot_loss[loss=0.162, simple_loss=0.2514, pruned_loss=0.03631, over 1377180.21 frames.], batch size: 17, lr: 4.71e-04 +2022-05-27 21:31:36,089 INFO [train.py:823] (3/4) Epoch 36, batch 750, loss[loss=0.1727, simple_loss=0.2704, pruned_loss=0.03745, over 7269.00 frames.], tot_loss[loss=0.1618, simple_loss=0.251, pruned_loss=0.0363, over 1387429.34 frames.], batch size: 21, lr: 4.70e-04 +2022-05-27 21:32:16,410 INFO [train.py:823] (3/4) Epoch 36, batch 800, loss[loss=0.1608, simple_loss=0.2574, pruned_loss=0.0321, over 7378.00 frames.], tot_loss[loss=0.162, simple_loss=0.2512, pruned_loss=0.03639, over 1387645.79 frames.], batch size: 21, lr: 4.70e-04 +2022-05-27 21:32:55,460 INFO [train.py:823] (3/4) Epoch 36, batch 850, loss[loss=0.1623, simple_loss=0.2542, pruned_loss=0.03523, over 7347.00 frames.], tot_loss[loss=0.1611, simple_loss=0.2501, pruned_loss=0.03601, over 1388923.25 frames.], batch size: 23, lr: 4.70e-04 +2022-05-27 21:33:34,443 INFO [train.py:823] (3/4) Epoch 36, batch 900, loss[loss=0.141, simple_loss=0.245, pruned_loss=0.0185, over 7406.00 frames.], tot_loss[loss=0.161, simple_loss=0.2507, pruned_loss=0.03568, over 1396891.69 frames.], batch size: 22, lr: 4.69e-04 +2022-05-27 21:34:27,399 INFO [train.py:823] (3/4) Epoch 37, batch 0, loss[loss=0.161, simple_loss=0.2613, pruned_loss=0.03035, over 6434.00 frames.], tot_loss[loss=0.161, simple_loss=0.2613, pruned_loss=0.03035, over 6434.00 frames.], batch size: 34, lr: 4.63e-04 +2022-05-27 21:35:06,624 INFO [train.py:823] (3/4) Epoch 37, batch 50, loss[loss=0.1793, simple_loss=0.2703, pruned_loss=0.04418, over 7291.00 frames.], tot_loss[loss=0.1627, simple_loss=0.2537, pruned_loss=0.03588, over 318466.80 frames.], batch size: 22, lr: 4.62e-04 +2022-05-27 21:35:45,373 INFO [train.py:823] (3/4) Epoch 37, batch 100, loss[loss=0.1639, simple_loss=0.2535, pruned_loss=0.03722, over 7220.00 frames.], tot_loss[loss=0.1602, simple_loss=0.2505, pruned_loss=0.0349, over 561835.44 frames.], batch size: 24, lr: 4.62e-04 
+2022-05-27 21:36:24,708 INFO [train.py:823] (3/4) Epoch 37, batch 150, loss[loss=0.1716, simple_loss=0.2674, pruned_loss=0.03787, over 7176.00 frames.], tot_loss[loss=0.1605, simple_loss=0.2505, pruned_loss=0.03525, over 750047.09 frames.], batch size: 21, lr: 4.62e-04 +2022-05-27 21:37:04,067 INFO [train.py:823] (3/4) Epoch 37, batch 200, loss[loss=0.2097, simple_loss=0.2865, pruned_loss=0.06639, over 7248.00 frames.], tot_loss[loss=0.1608, simple_loss=0.2498, pruned_loss=0.03592, over 902529.57 frames.], batch size: 24, lr: 4.61e-04 +2022-05-27 21:37:43,545 INFO [train.py:823] (3/4) Epoch 37, batch 250, loss[loss=0.1537, simple_loss=0.2565, pruned_loss=0.02544, over 7001.00 frames.], tot_loss[loss=0.1596, simple_loss=0.2494, pruned_loss=0.03492, over 1019179.88 frames.], batch size: 26, lr: 4.61e-04 +2022-05-27 21:38:22,831 INFO [train.py:823] (3/4) Epoch 37, batch 300, loss[loss=0.1245, simple_loss=0.2091, pruned_loss=0.01996, over 6987.00 frames.], tot_loss[loss=0.1596, simple_loss=0.249, pruned_loss=0.03509, over 1104283.98 frames.], batch size: 16, lr: 4.61e-04 +2022-05-27 21:39:02,462 INFO [train.py:823] (3/4) Epoch 37, batch 350, loss[loss=0.1719, simple_loss=0.27, pruned_loss=0.03687, over 7192.00 frames.], tot_loss[loss=0.1597, simple_loss=0.2494, pruned_loss=0.03501, over 1172535.64 frames.], batch size: 25, lr: 4.60e-04 +2022-05-27 21:39:41,253 INFO [train.py:823] (3/4) Epoch 37, batch 400, loss[loss=0.1371, simple_loss=0.2161, pruned_loss=0.02905, over 7286.00 frames.], tot_loss[loss=0.1601, simple_loss=0.2498, pruned_loss=0.03519, over 1229058.43 frames.], batch size: 17, lr: 4.60e-04 +2022-05-27 21:40:19,857 INFO [train.py:823] (3/4) Epoch 37, batch 450, loss[loss=0.1586, simple_loss=0.2419, pruned_loss=0.03767, over 7202.00 frames.], tot_loss[loss=0.1604, simple_loss=0.2503, pruned_loss=0.03519, over 1268455.24 frames.], batch size: 19, lr: 4.60e-04 +2022-05-27 21:40:59,002 INFO [train.py:823] (3/4) Epoch 37, batch 500, loss[loss=0.1408, simple_loss=0.2332, pruned_loss=0.02426, over 7017.00 frames.], tot_loss[loss=0.1598, simple_loss=0.2502, pruned_loss=0.03469, over 1303959.84 frames.], batch size: 16, lr: 4.59e-04 +2022-05-27 21:41:38,518 INFO [train.py:823] (3/4) Epoch 37, batch 550, loss[loss=0.1319, simple_loss=0.214, pruned_loss=0.02491, over 7012.00 frames.], tot_loss[loss=0.1596, simple_loss=0.2498, pruned_loss=0.03473, over 1329430.64 frames.], batch size: 16, lr: 4.59e-04 +2022-05-27 21:42:17,376 INFO [train.py:823] (3/4) Epoch 37, batch 600, loss[loss=0.1801, simple_loss=0.2709, pruned_loss=0.04464, over 7349.00 frames.], tot_loss[loss=0.1608, simple_loss=0.2511, pruned_loss=0.03525, over 1348783.18 frames.], batch size: 23, lr: 4.59e-04 +2022-05-27 21:42:55,860 INFO [train.py:823] (3/4) Epoch 37, batch 650, loss[loss=0.1454, simple_loss=0.2282, pruned_loss=0.0313, over 7169.00 frames.], tot_loss[loss=0.1605, simple_loss=0.2505, pruned_loss=0.03524, over 1364043.32 frames.], batch size: 17, lr: 4.58e-04 +2022-05-27 21:43:34,824 INFO [train.py:823] (3/4) Epoch 37, batch 700, loss[loss=0.1569, simple_loss=0.2563, pruned_loss=0.02875, over 7409.00 frames.], tot_loss[loss=0.1609, simple_loss=0.2512, pruned_loss=0.03524, over 1371898.30 frames.], batch size: 22, lr: 4.58e-04 +2022-05-27 21:44:14,186 INFO [train.py:823] (3/4) Epoch 37, batch 750, loss[loss=0.1633, simple_loss=0.2548, pruned_loss=0.03589, over 4866.00 frames.], tot_loss[loss=0.16, simple_loss=0.2504, pruned_loss=0.03484, over 1379864.06 frames.], batch size: 46, lr: 4.58e-04 +2022-05-27 21:44:53,047 
INFO [train.py:823] (3/4) Epoch 37, batch 800, loss[loss=0.1741, simple_loss=0.2719, pruned_loss=0.03815, over 7286.00 frames.], tot_loss[loss=0.1608, simple_loss=0.2507, pruned_loss=0.03547, over 1385468.60 frames.], batch size: 21, lr: 4.57e-04 +2022-05-27 21:45:31,936 INFO [train.py:823] (3/4) Epoch 37, batch 850, loss[loss=0.1567, simple_loss=0.2378, pruned_loss=0.03777, over 7224.00 frames.], tot_loss[loss=0.1602, simple_loss=0.2501, pruned_loss=0.03521, over 1387311.87 frames.], batch size: 16, lr: 4.57e-04 +2022-05-27 21:46:10,815 INFO [train.py:823] (3/4) Epoch 37, batch 900, loss[loss=0.2362, simple_loss=0.3305, pruned_loss=0.07096, over 7153.00 frames.], tot_loss[loss=0.16, simple_loss=0.2498, pruned_loss=0.03511, over 1394107.53 frames.], batch size: 23, lr: 4.57e-04 +2022-05-27 21:47:05,050 INFO [train.py:823] (3/4) Epoch 38, batch 0, loss[loss=0.1576, simple_loss=0.2471, pruned_loss=0.03407, over 7392.00 frames.], tot_loss[loss=0.1576, simple_loss=0.2471, pruned_loss=0.03407, over 7392.00 frames.], batch size: 19, lr: 4.50e-04 +2022-05-27 21:47:44,001 INFO [train.py:823] (3/4) Epoch 38, batch 50, loss[loss=0.1566, simple_loss=0.2525, pruned_loss=0.03038, over 7109.00 frames.], tot_loss[loss=0.1592, simple_loss=0.2498, pruned_loss=0.03431, over 322555.24 frames.], batch size: 19, lr: 4.50e-04 +2022-05-27 21:48:24,770 INFO [train.py:823] (3/4) Epoch 38, batch 100, loss[loss=0.1551, simple_loss=0.248, pruned_loss=0.0311, over 7352.00 frames.], tot_loss[loss=0.1584, simple_loss=0.2484, pruned_loss=0.03422, over 565394.95 frames.], batch size: 23, lr: 4.50e-04 +2022-05-27 21:49:03,924 INFO [train.py:823] (3/4) Epoch 38, batch 150, loss[loss=0.1692, simple_loss=0.2718, pruned_loss=0.03329, over 6972.00 frames.], tot_loss[loss=0.1568, simple_loss=0.2468, pruned_loss=0.03341, over 754111.14 frames.], batch size: 26, lr: 4.50e-04 +2022-05-27 21:49:43,236 INFO [train.py:823] (3/4) Epoch 38, batch 200, loss[loss=0.1784, simple_loss=0.2728, pruned_loss=0.04206, over 6488.00 frames.], tot_loss[loss=0.1569, simple_loss=0.247, pruned_loss=0.03335, over 901627.93 frames.], batch size: 34, lr: 4.49e-04 +2022-05-27 21:50:22,164 INFO [train.py:823] (3/4) Epoch 38, batch 250, loss[loss=0.1674, simple_loss=0.2554, pruned_loss=0.03972, over 7107.00 frames.], tot_loss[loss=0.1571, simple_loss=0.2472, pruned_loss=0.03347, over 1020877.45 frames.], batch size: 20, lr: 4.49e-04 +2022-05-27 21:51:03,070 INFO [train.py:823] (3/4) Epoch 38, batch 300, loss[loss=0.1822, simple_loss=0.2663, pruned_loss=0.04904, over 7281.00 frames.], tot_loss[loss=0.1587, simple_loss=0.2487, pruned_loss=0.03435, over 1107496.05 frames.], batch size: 21, lr: 4.49e-04 +2022-05-27 21:51:42,042 INFO [train.py:823] (3/4) Epoch 38, batch 350, loss[loss=0.1649, simple_loss=0.2454, pruned_loss=0.04221, over 6843.00 frames.], tot_loss[loss=0.1597, simple_loss=0.2497, pruned_loss=0.03487, over 1182161.63 frames.], batch size: 15, lr: 4.48e-04 +2022-05-27 21:52:21,197 INFO [train.py:823] (3/4) Epoch 38, batch 400, loss[loss=0.1625, simple_loss=0.2535, pruned_loss=0.03581, over 4793.00 frames.], tot_loss[loss=0.1607, simple_loss=0.2509, pruned_loss=0.0352, over 1235641.38 frames.], batch size: 46, lr: 4.48e-04 +2022-05-27 21:53:00,061 INFO [train.py:823] (3/4) Epoch 38, batch 450, loss[loss=0.1751, simple_loss=0.2699, pruned_loss=0.04014, over 7199.00 frames.], tot_loss[loss=0.1596, simple_loss=0.2496, pruned_loss=0.03478, over 1281253.90 frames.], batch size: 20, lr: 4.48e-04 +2022-05-27 21:53:39,586 INFO [train.py:823] (3/4) Epoch 
38, batch 500, loss[loss=0.1947, simple_loss=0.2914, pruned_loss=0.04898, over 7275.00 frames.], tot_loss[loss=0.1587, simple_loss=0.2482, pruned_loss=0.03455, over 1315818.23 frames.], batch size: 21, lr: 4.47e-04 +2022-05-27 21:54:19,922 INFO [train.py:823] (3/4) Epoch 38, batch 550, loss[loss=0.1594, simple_loss=0.2645, pruned_loss=0.02709, over 7201.00 frames.], tot_loss[loss=0.1588, simple_loss=0.2483, pruned_loss=0.03466, over 1334205.90 frames.], batch size: 20, lr: 4.47e-04 +2022-05-27 21:54:59,349 INFO [train.py:823] (3/4) Epoch 38, batch 600, loss[loss=0.1834, simple_loss=0.2744, pruned_loss=0.04619, over 6480.00 frames.], tot_loss[loss=0.1592, simple_loss=0.2484, pruned_loss=0.03505, over 1351933.20 frames.], batch size: 34, lr: 4.47e-04 +2022-05-27 21:55:39,810 INFO [train.py:823] (3/4) Epoch 38, batch 650, loss[loss=0.1543, simple_loss=0.2518, pruned_loss=0.02842, over 7281.00 frames.], tot_loss[loss=0.1602, simple_loss=0.2499, pruned_loss=0.03523, over 1367393.24 frames.], batch size: 20, lr: 4.46e-04 +2022-05-27 21:56:18,869 INFO [train.py:823] (3/4) Epoch 38, batch 700, loss[loss=0.1627, simple_loss=0.2519, pruned_loss=0.03676, over 7172.00 frames.], tot_loss[loss=0.16, simple_loss=0.25, pruned_loss=0.03497, over 1377756.08 frames.], batch size: 22, lr: 4.46e-04 +2022-05-27 21:56:57,013 INFO [train.py:823] (3/4) Epoch 38, batch 750, loss[loss=0.1696, simple_loss=0.2637, pruned_loss=0.03769, over 7250.00 frames.], tot_loss[loss=0.1596, simple_loss=0.2499, pruned_loss=0.03467, over 1381937.42 frames.], batch size: 24, lr: 4.46e-04 +2022-05-27 21:57:36,080 INFO [train.py:823] (3/4) Epoch 38, batch 800, loss[loss=0.1651, simple_loss=0.2563, pruned_loss=0.03699, over 7370.00 frames.], tot_loss[loss=0.1599, simple_loss=0.2505, pruned_loss=0.03468, over 1384647.98 frames.], batch size: 21, lr: 4.45e-04 +2022-05-27 21:58:14,994 INFO [train.py:823] (3/4) Epoch 38, batch 850, loss[loss=0.1988, simple_loss=0.3029, pruned_loss=0.04737, over 6953.00 frames.], tot_loss[loss=0.1606, simple_loss=0.2513, pruned_loss=0.03501, over 1394793.35 frames.], batch size: 29, lr: 4.45e-04 +2022-05-27 21:58:54,508 INFO [train.py:823] (3/4) Epoch 38, batch 900, loss[loss=0.1651, simple_loss=0.2614, pruned_loss=0.03444, over 6997.00 frames.], tot_loss[loss=0.1604, simple_loss=0.2508, pruned_loss=0.035, over 1399008.30 frames.], batch size: 16, lr: 4.45e-04 +2022-05-27 21:59:32,705 INFO [train.py:823] (3/4) Epoch 38, batch 950, loss[loss=0.1745, simple_loss=0.2641, pruned_loss=0.04251, over 5200.00 frames.], tot_loss[loss=0.1599, simple_loss=0.2501, pruned_loss=0.03486, over 1375364.17 frames.], batch size: 47, lr: 4.45e-04 +2022-05-27 21:59:45,977 INFO [train.py:823] (3/4) Epoch 39, batch 0, loss[loss=0.1497, simple_loss=0.2406, pruned_loss=0.02942, over 7276.00 frames.], tot_loss[loss=0.1497, simple_loss=0.2406, pruned_loss=0.02942, over 7276.00 frames.], batch size: 19, lr: 4.39e-04 +2022-05-27 22:00:25,289 INFO [train.py:823] (3/4) Epoch 39, batch 50, loss[loss=0.1639, simple_loss=0.2524, pruned_loss=0.03773, over 7417.00 frames.], tot_loss[loss=0.1604, simple_loss=0.2488, pruned_loss=0.03594, over 321312.30 frames.], batch size: 22, lr: 4.39e-04 +2022-05-27 22:01:04,329 INFO [train.py:823] (3/4) Epoch 39, batch 100, loss[loss=0.1388, simple_loss=0.2232, pruned_loss=0.02718, over 7309.00 frames.], tot_loss[loss=0.1577, simple_loss=0.2463, pruned_loss=0.03452, over 566048.29 frames.], batch size: 18, lr: 4.38e-04 +2022-05-27 22:01:43,980 INFO [train.py:823] (3/4) Epoch 39, batch 150, loss[loss=0.1677, 
simple_loss=0.2598, pruned_loss=0.03778, over 7212.00 frames.], tot_loss[loss=0.1565, simple_loss=0.2452, pruned_loss=0.03393, over 754149.49 frames.], batch size: 25, lr: 4.38e-04 +2022-05-27 22:02:23,536 INFO [train.py:823] (3/4) Epoch 39, batch 200, loss[loss=0.1477, simple_loss=0.238, pruned_loss=0.02872, over 7385.00 frames.], tot_loss[loss=0.1565, simple_loss=0.2455, pruned_loss=0.0337, over 905912.36 frames.], batch size: 19, lr: 4.38e-04 +2022-05-27 22:03:03,286 INFO [train.py:823] (3/4) Epoch 39, batch 250, loss[loss=0.1822, simple_loss=0.2627, pruned_loss=0.05085, over 7296.00 frames.], tot_loss[loss=0.157, simple_loss=0.2463, pruned_loss=0.03384, over 1019954.24 frames.], batch size: 19, lr: 4.37e-04 +2022-05-27 22:03:42,563 INFO [train.py:823] (3/4) Epoch 39, batch 300, loss[loss=0.1527, simple_loss=0.2435, pruned_loss=0.03099, over 7283.00 frames.], tot_loss[loss=0.1572, simple_loss=0.2462, pruned_loss=0.03405, over 1112219.51 frames.], batch size: 19, lr: 4.37e-04 +2022-05-27 22:04:21,969 INFO [train.py:823] (3/4) Epoch 39, batch 350, loss[loss=0.1655, simple_loss=0.2483, pruned_loss=0.0413, over 7366.00 frames.], tot_loss[loss=0.1574, simple_loss=0.2467, pruned_loss=0.03405, over 1183682.09 frames.], batch size: 20, lr: 4.37e-04 +2022-05-27 22:05:01,327 INFO [train.py:823] (3/4) Epoch 39, batch 400, loss[loss=0.1576, simple_loss=0.244, pruned_loss=0.03558, over 7020.00 frames.], tot_loss[loss=0.1572, simple_loss=0.247, pruned_loss=0.03365, over 1241105.88 frames.], batch size: 17, lr: 4.36e-04 +2022-05-27 22:05:40,483 INFO [train.py:823] (3/4) Epoch 39, batch 450, loss[loss=0.1663, simple_loss=0.2525, pruned_loss=0.04002, over 7047.00 frames.], tot_loss[loss=0.1573, simple_loss=0.2477, pruned_loss=0.03348, over 1280504.53 frames.], batch size: 26, lr: 4.36e-04 +2022-05-27 22:06:19,083 INFO [train.py:823] (3/4) Epoch 39, batch 500, loss[loss=0.1774, simple_loss=0.2632, pruned_loss=0.04579, over 5204.00 frames.], tot_loss[loss=0.1571, simple_loss=0.247, pruned_loss=0.03362, over 1309979.14 frames.], batch size: 48, lr: 4.36e-04 +2022-05-27 22:06:58,259 INFO [train.py:823] (3/4) Epoch 39, batch 550, loss[loss=0.1535, simple_loss=0.2455, pruned_loss=0.03071, over 7190.00 frames.], tot_loss[loss=0.1572, simple_loss=0.2469, pruned_loss=0.03373, over 1330776.35 frames.], batch size: 25, lr: 4.36e-04 +2022-05-27 22:07:37,616 INFO [train.py:823] (3/4) Epoch 39, batch 600, loss[loss=0.1547, simple_loss=0.2322, pruned_loss=0.03863, over 7032.00 frames.], tot_loss[loss=0.158, simple_loss=0.2476, pruned_loss=0.0342, over 1354505.25 frames.], batch size: 17, lr: 4.35e-04 +2022-05-27 22:08:17,310 INFO [train.py:823] (3/4) Epoch 39, batch 650, loss[loss=0.1801, simple_loss=0.2663, pruned_loss=0.04692, over 7394.00 frames.], tot_loss[loss=0.1579, simple_loss=0.2478, pruned_loss=0.034, over 1373343.69 frames.], batch size: 19, lr: 4.35e-04 +2022-05-27 22:08:55,745 INFO [train.py:823] (3/4) Epoch 39, batch 700, loss[loss=0.1733, simple_loss=0.2688, pruned_loss=0.03889, over 7242.00 frames.], tot_loss[loss=0.1581, simple_loss=0.2482, pruned_loss=0.03398, over 1382791.75 frames.], batch size: 24, lr: 4.35e-04 +2022-05-27 22:09:34,865 INFO [train.py:823] (3/4) Epoch 39, batch 750, loss[loss=0.1665, simple_loss=0.2628, pruned_loss=0.03507, over 7375.00 frames.], tot_loss[loss=0.1582, simple_loss=0.2479, pruned_loss=0.03422, over 1390573.56 frames.], batch size: 20, lr: 4.34e-04 +2022-05-27 22:10:14,160 INFO [train.py:823] (3/4) Epoch 39, batch 800, loss[loss=0.1517, simple_loss=0.235, 
pruned_loss=0.03416, over 7180.00 frames.], tot_loss[loss=0.1584, simple_loss=0.2483, pruned_loss=0.0342, over 1399391.10 frames.], batch size: 18, lr: 4.34e-04 +2022-05-27 22:10:52,825 INFO [train.py:823] (3/4) Epoch 39, batch 850, loss[loss=0.162, simple_loss=0.2564, pruned_loss=0.03386, over 7323.00 frames.], tot_loss[loss=0.1584, simple_loss=0.2486, pruned_loss=0.03407, over 1398473.52 frames.], batch size: 23, lr: 4.34e-04 +2022-05-27 22:11:31,903 INFO [train.py:823] (3/4) Epoch 39, batch 900, loss[loss=0.154, simple_loss=0.2482, pruned_loss=0.02993, over 6994.00 frames.], tot_loss[loss=0.1597, simple_loss=0.2502, pruned_loss=0.03457, over 1391259.45 frames.], batch size: 29, lr: 4.34e-04 +2022-05-27 22:12:11,437 INFO [train.py:823] (3/4) Epoch 39, batch 950, loss[loss=0.1587, simple_loss=0.25, pruned_loss=0.03375, over 4848.00 frames.], tot_loss[loss=0.1604, simple_loss=0.2506, pruned_loss=0.03513, over 1364093.55 frames.], batch size: 47, lr: 4.33e-04 +2022-05-27 22:12:24,504 INFO [train.py:823] (3/4) Epoch 40, batch 0, loss[loss=0.1742, simple_loss=0.2679, pruned_loss=0.04026, over 7179.00 frames.], tot_loss[loss=0.1742, simple_loss=0.2679, pruned_loss=0.04026, over 7179.00 frames.], batch size: 23, lr: 4.28e-04 +2022-05-27 22:13:03,058 INFO [train.py:823] (3/4) Epoch 40, batch 50, loss[loss=0.149, simple_loss=0.2521, pruned_loss=0.02298, over 7116.00 frames.], tot_loss[loss=0.1571, simple_loss=0.2462, pruned_loss=0.03403, over 318446.80 frames.], batch size: 20, lr: 4.28e-04 +2022-05-27 22:13:43,945 INFO [train.py:823] (3/4) Epoch 40, batch 100, loss[loss=0.1236, simple_loss=0.199, pruned_loss=0.02414, over 6784.00 frames.], tot_loss[loss=0.1574, simple_loss=0.247, pruned_loss=0.03396, over 560220.80 frames.], batch size: 15, lr: 4.27e-04 +2022-05-27 22:14:22,945 INFO [train.py:823] (3/4) Epoch 40, batch 150, loss[loss=0.1611, simple_loss=0.2581, pruned_loss=0.03203, over 6947.00 frames.], tot_loss[loss=0.1571, simple_loss=0.247, pruned_loss=0.03358, over 746819.18 frames.], batch size: 29, lr: 4.27e-04 +2022-05-27 22:15:02,192 INFO [train.py:823] (3/4) Epoch 40, batch 200, loss[loss=0.1704, simple_loss=0.261, pruned_loss=0.03987, over 7186.00 frames.], tot_loss[loss=0.1588, simple_loss=0.2483, pruned_loss=0.03466, over 897898.52 frames.], batch size: 21, lr: 4.27e-04 +2022-05-27 22:15:42,522 INFO [train.py:823] (3/4) Epoch 40, batch 250, loss[loss=0.1489, simple_loss=0.243, pruned_loss=0.02742, over 6800.00 frames.], tot_loss[loss=0.1586, simple_loss=0.2483, pruned_loss=0.03443, over 1015092.18 frames.], batch size: 15, lr: 4.26e-04 +2022-05-27 22:16:22,059 INFO [train.py:823] (3/4) Epoch 40, batch 300, loss[loss=0.1503, simple_loss=0.2451, pruned_loss=0.02777, over 7380.00 frames.], tot_loss[loss=0.1582, simple_loss=0.2476, pruned_loss=0.03444, over 1105914.62 frames.], batch size: 20, lr: 4.26e-04 +2022-05-27 22:17:01,127 INFO [train.py:823] (3/4) Epoch 40, batch 350, loss[loss=0.1768, simple_loss=0.2738, pruned_loss=0.03986, over 6486.00 frames.], tot_loss[loss=0.1585, simple_loss=0.2479, pruned_loss=0.03452, over 1178806.88 frames.], batch size: 34, lr: 4.26e-04 +2022-05-27 22:17:42,272 INFO [train.py:823] (3/4) Epoch 40, batch 400, loss[loss=0.143, simple_loss=0.2163, pruned_loss=0.03484, over 6997.00 frames.], tot_loss[loss=0.1582, simple_loss=0.2476, pruned_loss=0.03439, over 1237480.94 frames.], batch size: 16, lr: 4.26e-04 +2022-05-27 22:18:21,089 INFO [train.py:823] (3/4) Epoch 40, batch 450, loss[loss=0.1408, simple_loss=0.2125, pruned_loss=0.03457, over 7218.00 
frames.], tot_loss[loss=0.1587, simple_loss=0.2482, pruned_loss=0.03464, over 1277224.19 frames.], batch size: 16, lr: 4.25e-04 +2022-05-27 22:19:00,426 INFO [train.py:823] (3/4) Epoch 40, batch 500, loss[loss=0.1421, simple_loss=0.232, pruned_loss=0.02606, over 7372.00 frames.], tot_loss[loss=0.1581, simple_loss=0.2479, pruned_loss=0.03417, over 1309750.51 frames.], batch size: 20, lr: 4.25e-04 +2022-05-27 22:19:39,690 INFO [train.py:823] (3/4) Epoch 40, batch 550, loss[loss=0.1797, simple_loss=0.2737, pruned_loss=0.04285, over 7310.00 frames.], tot_loss[loss=0.1578, simple_loss=0.2476, pruned_loss=0.03403, over 1336904.92 frames.], batch size: 22, lr: 4.25e-04 +2022-05-27 22:20:18,681 INFO [train.py:823] (3/4) Epoch 40, batch 600, loss[loss=0.1564, simple_loss=0.2579, pruned_loss=0.02746, over 7308.00 frames.], tot_loss[loss=0.1576, simple_loss=0.2475, pruned_loss=0.03384, over 1356484.09 frames.], batch size: 22, lr: 4.24e-04 +2022-05-27 22:20:57,895 INFO [train.py:823] (3/4) Epoch 40, batch 650, loss[loss=0.1551, simple_loss=0.2491, pruned_loss=0.03055, over 7186.00 frames.], tot_loss[loss=0.1576, simple_loss=0.2477, pruned_loss=0.03381, over 1366494.37 frames.], batch size: 19, lr: 4.24e-04 +2022-05-27 22:21:37,057 INFO [train.py:823] (3/4) Epoch 40, batch 700, loss[loss=0.1609, simple_loss=0.2569, pruned_loss=0.03246, over 7188.00 frames.], tot_loss[loss=0.1585, simple_loss=0.2491, pruned_loss=0.03397, over 1378506.89 frames.], batch size: 20, lr: 4.24e-04 +2022-05-27 22:22:15,886 INFO [train.py:823] (3/4) Epoch 40, batch 750, loss[loss=0.1657, simple_loss=0.2715, pruned_loss=0.02996, over 4903.00 frames.], tot_loss[loss=0.1584, simple_loss=0.2493, pruned_loss=0.03378, over 1389018.47 frames.], batch size: 47, lr: 4.24e-04 +2022-05-27 22:22:55,335 INFO [train.py:823] (3/4) Epoch 40, batch 800, loss[loss=0.1635, simple_loss=0.2526, pruned_loss=0.03715, over 7176.00 frames.], tot_loss[loss=0.1581, simple_loss=0.2489, pruned_loss=0.03366, over 1391256.20 frames.], batch size: 21, lr: 4.23e-04 +2022-05-27 22:23:34,269 INFO [train.py:823] (3/4) Epoch 40, batch 850, loss[loss=0.1667, simple_loss=0.2486, pruned_loss=0.0424, over 7164.00 frames.], tot_loss[loss=0.1584, simple_loss=0.2491, pruned_loss=0.03385, over 1399735.73 frames.], batch size: 22, lr: 4.23e-04 +2022-05-27 22:24:12,892 INFO [train.py:823] (3/4) Epoch 40, batch 900, loss[loss=0.1589, simple_loss=0.2512, pruned_loss=0.03326, over 7364.00 frames.], tot_loss[loss=0.158, simple_loss=0.2485, pruned_loss=0.03378, over 1391327.71 frames.], batch size: 20, lr: 4.23e-04 +2022-05-27 22:25:03,853 INFO [train.py:823] (3/4) Epoch 41, batch 0, loss[loss=0.1424, simple_loss=0.2346, pruned_loss=0.02514, over 7083.00 frames.], tot_loss[loss=0.1424, simple_loss=0.2346, pruned_loss=0.02514, over 7083.00 frames.], batch size: 19, lr: 4.17e-04 +2022-05-27 22:25:43,089 INFO [train.py:823] (3/4) Epoch 41, batch 50, loss[loss=0.1653, simple_loss=0.2467, pruned_loss=0.04196, over 7362.00 frames.], tot_loss[loss=0.1599, simple_loss=0.2499, pruned_loss=0.03496, over 321250.31 frames.], batch size: 20, lr: 4.17e-04 +2022-05-27 22:26:21,992 INFO [train.py:823] (3/4) Epoch 41, batch 100, loss[loss=0.1428, simple_loss=0.2375, pruned_loss=0.02404, over 7104.00 frames.], tot_loss[loss=0.159, simple_loss=0.2501, pruned_loss=0.03392, over 561138.76 frames.], batch size: 18, lr: 4.17e-04 +2022-05-27 22:27:01,002 INFO [train.py:823] (3/4) Epoch 41, batch 150, loss[loss=0.1573, simple_loss=0.2453, pruned_loss=0.03463, over 7001.00 frames.], 
tot_loss[loss=0.1583, simple_loss=0.249, pruned_loss=0.0338, over 753281.83 frames.], batch size: 26, lr: 4.17e-04 +2022-05-27 22:27:40,096 INFO [train.py:823] (3/4) Epoch 41, batch 200, loss[loss=0.157, simple_loss=0.2459, pruned_loss=0.0341, over 7389.00 frames.], tot_loss[loss=0.1593, simple_loss=0.2498, pruned_loss=0.03442, over 905834.14 frames.], batch size: 19, lr: 4.16e-04 +2022-05-27 22:28:19,439 INFO [train.py:823] (3/4) Epoch 41, batch 250, loss[loss=0.1551, simple_loss=0.2467, pruned_loss=0.03175, over 7106.00 frames.], tot_loss[loss=0.1582, simple_loss=0.2484, pruned_loss=0.03404, over 1016369.67 frames.], batch size: 19, lr: 4.16e-04 +2022-05-27 22:28:57,997 INFO [train.py:823] (3/4) Epoch 41, batch 300, loss[loss=0.1385, simple_loss=0.2296, pruned_loss=0.0237, over 7371.00 frames.], tot_loss[loss=0.1571, simple_loss=0.247, pruned_loss=0.03359, over 1107206.19 frames.], batch size: 20, lr: 4.16e-04 +2022-05-27 22:29:36,967 INFO [train.py:823] (3/4) Epoch 41, batch 350, loss[loss=0.1819, simple_loss=0.2708, pruned_loss=0.0465, over 7176.00 frames.], tot_loss[loss=0.1577, simple_loss=0.2479, pruned_loss=0.03375, over 1174707.08 frames.], batch size: 22, lr: 4.15e-04 +2022-05-27 22:30:15,347 INFO [train.py:823] (3/4) Epoch 41, batch 400, loss[loss=0.164, simple_loss=0.2492, pruned_loss=0.03941, over 7156.00 frames.], tot_loss[loss=0.1573, simple_loss=0.2474, pruned_loss=0.03358, over 1221207.89 frames.], batch size: 23, lr: 4.15e-04 +2022-05-27 22:30:54,570 INFO [train.py:823] (3/4) Epoch 41, batch 450, loss[loss=0.1346, simple_loss=0.2265, pruned_loss=0.02141, over 7100.00 frames.], tot_loss[loss=0.1582, simple_loss=0.2482, pruned_loss=0.03413, over 1264031.01 frames.], batch size: 18, lr: 4.15e-04 +2022-05-27 22:31:33,867 INFO [train.py:823] (3/4) Epoch 41, batch 500, loss[loss=0.1616, simple_loss=0.2553, pruned_loss=0.0339, over 7301.00 frames.], tot_loss[loss=0.1585, simple_loss=0.2486, pruned_loss=0.03421, over 1299343.12 frames.], batch size: 22, lr: 4.15e-04 +2022-05-27 22:32:12,809 INFO [train.py:823] (3/4) Epoch 41, batch 550, loss[loss=0.1441, simple_loss=0.232, pruned_loss=0.02809, over 7193.00 frames.], tot_loss[loss=0.1572, simple_loss=0.2475, pruned_loss=0.03341, over 1323334.50 frames.], batch size: 19, lr: 4.14e-04 +2022-05-27 22:32:51,769 INFO [train.py:823] (3/4) Epoch 41, batch 600, loss[loss=0.2178, simple_loss=0.2962, pruned_loss=0.06975, over 7182.00 frames.], tot_loss[loss=0.1579, simple_loss=0.248, pruned_loss=0.03395, over 1339716.07 frames.], batch size: 21, lr: 4.14e-04 +2022-05-27 22:33:31,359 INFO [train.py:823] (3/4) Epoch 41, batch 650, loss[loss=0.1743, simple_loss=0.27, pruned_loss=0.03929, over 7188.00 frames.], tot_loss[loss=0.1581, simple_loss=0.2484, pruned_loss=0.03394, over 1358738.15 frames.], batch size: 21, lr: 4.14e-04 +2022-05-27 22:34:10,152 INFO [train.py:823] (3/4) Epoch 41, batch 700, loss[loss=0.1348, simple_loss=0.2141, pruned_loss=0.02771, over 6889.00 frames.], tot_loss[loss=0.1574, simple_loss=0.2476, pruned_loss=0.03358, over 1371608.91 frames.], batch size: 15, lr: 4.14e-04 +2022-05-27 22:34:50,579 INFO [train.py:823] (3/4) Epoch 41, batch 750, loss[loss=0.1354, simple_loss=0.2181, pruned_loss=0.02633, over 7183.00 frames.], tot_loss[loss=0.1573, simple_loss=0.2475, pruned_loss=0.03349, over 1379542.30 frames.], batch size: 18, lr: 4.13e-04 +2022-05-27 22:35:29,218 INFO [train.py:823] (3/4) Epoch 41, batch 800, loss[loss=0.1319, simple_loss=0.2221, pruned_loss=0.02086, over 7300.00 frames.], tot_loss[loss=0.1565, 
simple_loss=0.2468, pruned_loss=0.03312, over 1381575.43 frames.], batch size: 17, lr: 4.13e-04 +2022-05-27 22:36:09,569 INFO [train.py:823] (3/4) Epoch 41, batch 850, loss[loss=0.1283, simple_loss=0.2153, pruned_loss=0.02064, over 7286.00 frames.], tot_loss[loss=0.1571, simple_loss=0.2477, pruned_loss=0.03323, over 1394358.58 frames.], batch size: 19, lr: 4.13e-04 +2022-05-27 22:36:48,728 INFO [train.py:823] (3/4) Epoch 41, batch 900, loss[loss=0.1366, simple_loss=0.2131, pruned_loss=0.03007, over 7279.00 frames.], tot_loss[loss=0.1568, simple_loss=0.247, pruned_loss=0.0333, over 1400002.36 frames.], batch size: 17, lr: 4.13e-04 +2022-05-27 22:37:42,339 INFO [train.py:823] (3/4) Epoch 42, batch 0, loss[loss=0.1668, simple_loss=0.2612, pruned_loss=0.03624, over 7295.00 frames.], tot_loss[loss=0.1668, simple_loss=0.2612, pruned_loss=0.03624, over 7295.00 frames.], batch size: 21, lr: 4.07e-04 +2022-05-27 22:38:21,757 INFO [train.py:823] (3/4) Epoch 42, batch 50, loss[loss=0.1429, simple_loss=0.2274, pruned_loss=0.02918, over 7390.00 frames.], tot_loss[loss=0.1557, simple_loss=0.2441, pruned_loss=0.0336, over 323954.37 frames.], batch size: 19, lr: 4.07e-04 +2022-05-27 22:39:02,329 INFO [train.py:823] (3/4) Epoch 42, batch 100, loss[loss=0.1399, simple_loss=0.2206, pruned_loss=0.02962, over 6810.00 frames.], tot_loss[loss=0.1545, simple_loss=0.2446, pruned_loss=0.03219, over 565962.79 frames.], batch size: 15, lr: 4.07e-04 +2022-05-27 22:39:41,255 INFO [train.py:823] (3/4) Epoch 42, batch 150, loss[loss=0.1738, simple_loss=0.2668, pruned_loss=0.04038, over 7178.00 frames.], tot_loss[loss=0.1548, simple_loss=0.2444, pruned_loss=0.03261, over 755756.40 frames.], batch size: 22, lr: 4.07e-04 +2022-05-27 22:40:22,043 INFO [train.py:823] (3/4) Epoch 42, batch 200, loss[loss=0.1641, simple_loss=0.2517, pruned_loss=0.03825, over 7219.00 frames.], tot_loss[loss=0.1564, simple_loss=0.246, pruned_loss=0.03345, over 901419.87 frames.], batch size: 24, lr: 4.06e-04 +2022-05-27 22:41:01,173 INFO [train.py:823] (3/4) Epoch 42, batch 250, loss[loss=0.1321, simple_loss=0.2217, pruned_loss=0.02124, over 7146.00 frames.], tot_loss[loss=0.1563, simple_loss=0.2462, pruned_loss=0.03316, over 1016806.59 frames.], batch size: 17, lr: 4.06e-04 +2022-05-27 22:41:40,216 INFO [train.py:823] (3/4) Epoch 42, batch 300, loss[loss=0.1542, simple_loss=0.2489, pruned_loss=0.02972, over 7187.00 frames.], tot_loss[loss=0.1568, simple_loss=0.2465, pruned_loss=0.03357, over 1101213.78 frames.], batch size: 21, lr: 4.06e-04 +2022-05-27 22:42:18,677 INFO [train.py:823] (3/4) Epoch 42, batch 350, loss[loss=0.1382, simple_loss=0.2217, pruned_loss=0.02734, over 7140.00 frames.], tot_loss[loss=0.1567, simple_loss=0.2463, pruned_loss=0.03354, over 1167545.47 frames.], batch size: 17, lr: 4.06e-04 +2022-05-27 22:42:57,748 INFO [train.py:823] (3/4) Epoch 42, batch 400, loss[loss=0.1306, simple_loss=0.2084, pruned_loss=0.02636, over 7289.00 frames.], tot_loss[loss=0.1568, simple_loss=0.2463, pruned_loss=0.03365, over 1217631.74 frames.], batch size: 17, lr: 4.05e-04 +2022-05-27 22:43:36,655 INFO [train.py:823] (3/4) Epoch 42, batch 450, loss[loss=0.1499, simple_loss=0.2446, pruned_loss=0.02757, over 7196.00 frames.], tot_loss[loss=0.1576, simple_loss=0.2474, pruned_loss=0.03392, over 1267478.96 frames.], batch size: 25, lr: 4.05e-04 +2022-05-27 22:44:16,194 INFO [train.py:823] (3/4) Epoch 42, batch 500, loss[loss=0.1528, simple_loss=0.2257, pruned_loss=0.03993, over 7156.00 frames.], tot_loss[loss=0.1574, simple_loss=0.2469, 
pruned_loss=0.03394, over 1302255.36 frames.], batch size: 17, lr: 4.05e-04 +2022-05-27 22:44:54,561 INFO [train.py:823] (3/4) Epoch 42, batch 550, loss[loss=0.145, simple_loss=0.235, pruned_loss=0.0275, over 7184.00 frames.], tot_loss[loss=0.1572, simple_loss=0.2471, pruned_loss=0.03369, over 1322168.69 frames.], batch size: 18, lr: 4.05e-04 +2022-05-27 22:45:33,816 INFO [train.py:823] (3/4) Epoch 42, batch 600, loss[loss=0.1554, simple_loss=0.2579, pruned_loss=0.02643, over 7194.00 frames.], tot_loss[loss=0.1582, simple_loss=0.2483, pruned_loss=0.03399, over 1343673.71 frames.], batch size: 20, lr: 4.04e-04 +2022-05-27 22:46:12,681 INFO [train.py:823] (3/4) Epoch 42, batch 650, loss[loss=0.1679, simple_loss=0.2677, pruned_loss=0.03405, over 7151.00 frames.], tot_loss[loss=0.158, simple_loss=0.2483, pruned_loss=0.03384, over 1363988.35 frames.], batch size: 23, lr: 4.04e-04 +2022-05-27 22:46:51,911 INFO [train.py:823] (3/4) Epoch 42, batch 700, loss[loss=0.1816, simple_loss=0.2835, pruned_loss=0.03982, over 7003.00 frames.], tot_loss[loss=0.1581, simple_loss=0.2486, pruned_loss=0.03382, over 1369693.81 frames.], batch size: 29, lr: 4.04e-04 +2022-05-27 22:47:31,201 INFO [train.py:823] (3/4) Epoch 42, batch 750, loss[loss=0.1779, simple_loss=0.282, pruned_loss=0.0369, over 7374.00 frames.], tot_loss[loss=0.1574, simple_loss=0.2479, pruned_loss=0.03347, over 1383156.32 frames.], batch size: 21, lr: 4.04e-04 +2022-05-27 22:48:10,610 INFO [train.py:823] (3/4) Epoch 42, batch 800, loss[loss=0.1675, simple_loss=0.2613, pruned_loss=0.03681, over 6287.00 frames.], tot_loss[loss=0.1576, simple_loss=0.2479, pruned_loss=0.03363, over 1391058.57 frames.], batch size: 34, lr: 4.03e-04 +2022-05-27 22:48:49,652 INFO [train.py:823] (3/4) Epoch 42, batch 850, loss[loss=0.145, simple_loss=0.2312, pruned_loss=0.02939, over 7023.00 frames.], tot_loss[loss=0.1569, simple_loss=0.2473, pruned_loss=0.03325, over 1396161.99 frames.], batch size: 17, lr: 4.03e-04 +2022-05-27 22:49:29,039 INFO [train.py:823] (3/4) Epoch 42, batch 900, loss[loss=0.1507, simple_loss=0.2481, pruned_loss=0.02664, over 5094.00 frames.], tot_loss[loss=0.1558, simple_loss=0.2461, pruned_loss=0.03278, over 1395149.03 frames.], batch size: 46, lr: 4.03e-04 +2022-05-27 22:50:20,089 INFO [train.py:823] (3/4) Epoch 43, batch 0, loss[loss=0.1375, simple_loss=0.232, pruned_loss=0.0215, over 7287.00 frames.], tot_loss[loss=0.1375, simple_loss=0.232, pruned_loss=0.0215, over 7287.00 frames.], batch size: 19, lr: 3.98e-04 +2022-05-27 22:50:59,540 INFO [train.py:823] (3/4) Epoch 43, batch 50, loss[loss=0.1591, simple_loss=0.248, pruned_loss=0.03513, over 7374.00 frames.], tot_loss[loss=0.1535, simple_loss=0.2438, pruned_loss=0.03164, over 321893.73 frames.], batch size: 20, lr: 3.98e-04 +2022-05-27 22:51:38,843 INFO [train.py:823] (3/4) Epoch 43, batch 100, loss[loss=0.1814, simple_loss=0.2671, pruned_loss=0.04782, over 7164.00 frames.], tot_loss[loss=0.1543, simple_loss=0.2443, pruned_loss=0.03219, over 564852.71 frames.], batch size: 23, lr: 3.97e-04 +2022-05-27 22:52:22,716 INFO [train.py:823] (3/4) Epoch 43, batch 150, loss[loss=0.1419, simple_loss=0.2419, pruned_loss=0.0209, over 6710.00 frames.], tot_loss[loss=0.1555, simple_loss=0.2449, pruned_loss=0.03307, over 754286.81 frames.], batch size: 34, lr: 3.97e-04 +2022-05-27 22:53:01,465 INFO [train.py:823] (3/4) Epoch 43, batch 200, loss[loss=0.1843, simple_loss=0.2673, pruned_loss=0.0506, over 7341.00 frames.], tot_loss[loss=0.1561, simple_loss=0.2457, pruned_loss=0.03325, over 905531.89 
frames.], batch size: 23, lr: 3.97e-04 +2022-05-27 22:53:40,857 INFO [train.py:823] (3/4) Epoch 43, batch 250, loss[loss=0.1458, simple_loss=0.2261, pruned_loss=0.03279, over 7307.00 frames.], tot_loss[loss=0.1553, simple_loss=0.2452, pruned_loss=0.03266, over 1022066.92 frames.], batch size: 18, lr: 3.97e-04 +2022-05-27 22:54:19,531 INFO [train.py:823] (3/4) Epoch 43, batch 300, loss[loss=0.1658, simple_loss=0.2534, pruned_loss=0.03907, over 7093.00 frames.], tot_loss[loss=0.1566, simple_loss=0.2464, pruned_loss=0.03339, over 1102826.09 frames.], batch size: 18, lr: 3.96e-04 +2022-05-27 22:54:59,084 INFO [train.py:823] (3/4) Epoch 43, batch 350, loss[loss=0.1717, simple_loss=0.2597, pruned_loss=0.04181, over 7343.00 frames.], tot_loss[loss=0.1566, simple_loss=0.2462, pruned_loss=0.03347, over 1174627.44 frames.], batch size: 23, lr: 3.96e-04 +2022-05-27 22:55:37,586 INFO [train.py:823] (3/4) Epoch 43, batch 400, loss[loss=0.1532, simple_loss=0.2433, pruned_loss=0.03155, over 7183.00 frames.], tot_loss[loss=0.1564, simple_loss=0.2458, pruned_loss=0.03354, over 1229076.84 frames.], batch size: 20, lr: 3.96e-04 +2022-05-27 22:56:17,091 INFO [train.py:823] (3/4) Epoch 43, batch 450, loss[loss=0.1499, simple_loss=0.2385, pruned_loss=0.03067, over 7172.00 frames.], tot_loss[loss=0.1558, simple_loss=0.2456, pruned_loss=0.03299, over 1275581.23 frames.], batch size: 21, lr: 3.96e-04 +2022-05-27 22:56:56,314 INFO [train.py:823] (3/4) Epoch 43, batch 500, loss[loss=0.125, simple_loss=0.2073, pruned_loss=0.02139, over 7026.00 frames.], tot_loss[loss=0.1554, simple_loss=0.2451, pruned_loss=0.03285, over 1307689.14 frames.], batch size: 17, lr: 3.95e-04 +2022-05-27 22:57:35,572 INFO [train.py:823] (3/4) Epoch 43, batch 550, loss[loss=0.164, simple_loss=0.2539, pruned_loss=0.03702, over 7285.00 frames.], tot_loss[loss=0.1562, simple_loss=0.2465, pruned_loss=0.03299, over 1337582.44 frames.], batch size: 21, lr: 3.95e-04 +2022-05-27 22:58:14,112 INFO [train.py:823] (3/4) Epoch 43, batch 600, loss[loss=0.1862, simple_loss=0.277, pruned_loss=0.04771, over 7174.00 frames.], tot_loss[loss=0.1568, simple_loss=0.2471, pruned_loss=0.03322, over 1357093.34 frames.], batch size: 22, lr: 3.95e-04 +2022-05-27 22:58:54,306 INFO [train.py:823] (3/4) Epoch 43, batch 650, loss[loss=0.168, simple_loss=0.2664, pruned_loss=0.03477, over 7187.00 frames.], tot_loss[loss=0.1564, simple_loss=0.2467, pruned_loss=0.03307, over 1373768.80 frames.], batch size: 20, lr: 3.95e-04 +2022-05-27 22:59:34,034 INFO [train.py:823] (3/4) Epoch 43, batch 700, loss[loss=0.1347, simple_loss=0.2241, pruned_loss=0.02268, over 7039.00 frames.], tot_loss[loss=0.1564, simple_loss=0.2468, pruned_loss=0.03301, over 1384013.02 frames.], batch size: 17, lr: 3.94e-04 +2022-05-27 23:00:13,166 INFO [train.py:823] (3/4) Epoch 43, batch 750, loss[loss=0.149, simple_loss=0.2477, pruned_loss=0.02513, over 7174.00 frames.], tot_loss[loss=0.1566, simple_loss=0.2466, pruned_loss=0.03328, over 1392704.60 frames.], batch size: 21, lr: 3.94e-04 +2022-05-27 23:00:51,809 INFO [train.py:823] (3/4) Epoch 43, batch 800, loss[loss=0.1464, simple_loss=0.2418, pruned_loss=0.02551, over 7307.00 frames.], tot_loss[loss=0.1561, simple_loss=0.2461, pruned_loss=0.03304, over 1402273.55 frames.], batch size: 22, lr: 3.94e-04 +2022-05-27 23:01:30,969 INFO [train.py:823] (3/4) Epoch 43, batch 850, loss[loss=0.1639, simple_loss=0.2547, pruned_loss=0.03655, over 7162.00 frames.], tot_loss[loss=0.1555, simple_loss=0.2453, pruned_loss=0.03282, over 1404009.61 frames.], batch size: 
22, lr: 3.94e-04 +2022-05-27 23:02:11,070 INFO [train.py:823] (3/4) Epoch 43, batch 900, loss[loss=0.1418, simple_loss=0.2232, pruned_loss=0.03015, over 6799.00 frames.], tot_loss[loss=0.1551, simple_loss=0.2446, pruned_loss=0.03284, over 1402061.08 frames.], batch size: 15, lr: 3.93e-04 +2022-05-27 23:03:00,551 INFO [train.py:823] (3/4) Epoch 44, batch 0, loss[loss=0.1573, simple_loss=0.2497, pruned_loss=0.03252, over 7294.00 frames.], tot_loss[loss=0.1573, simple_loss=0.2497, pruned_loss=0.03252, over 7294.00 frames.], batch size: 22, lr: 3.89e-04 +2022-05-27 23:03:41,249 INFO [train.py:823] (3/4) Epoch 44, batch 50, loss[loss=0.1412, simple_loss=0.2289, pruned_loss=0.02675, over 7432.00 frames.], tot_loss[loss=0.1534, simple_loss=0.2426, pruned_loss=0.03213, over 322609.10 frames.], batch size: 18, lr: 3.89e-04 +2022-05-27 23:04:20,614 INFO [train.py:823] (3/4) Epoch 44, batch 100, loss[loss=0.1438, simple_loss=0.2329, pruned_loss=0.02736, over 7277.00 frames.], tot_loss[loss=0.1544, simple_loss=0.2442, pruned_loss=0.03227, over 566829.67 frames.], batch size: 20, lr: 3.88e-04 +2022-05-27 23:04:59,753 INFO [train.py:823] (3/4) Epoch 44, batch 150, loss[loss=0.1447, simple_loss=0.2375, pruned_loss=0.02593, over 7277.00 frames.], tot_loss[loss=0.1534, simple_loss=0.2435, pruned_loss=0.0317, over 757808.74 frames.], batch size: 20, lr: 3.88e-04 +2022-05-27 23:05:38,972 INFO [train.py:823] (3/4) Epoch 44, batch 200, loss[loss=0.1653, simple_loss=0.2694, pruned_loss=0.03059, over 7225.00 frames.], tot_loss[loss=0.154, simple_loss=0.2445, pruned_loss=0.03178, over 903622.73 frames.], batch size: 24, lr: 3.88e-04 +2022-05-27 23:06:18,023 INFO [train.py:823] (3/4) Epoch 44, batch 250, loss[loss=0.1676, simple_loss=0.261, pruned_loss=0.0371, over 7155.00 frames.], tot_loss[loss=0.1549, simple_loss=0.2455, pruned_loss=0.03216, over 1019418.96 frames.], batch size: 23, lr: 3.88e-04 +2022-05-27 23:06:56,919 INFO [train.py:823] (3/4) Epoch 44, batch 300, loss[loss=0.1655, simple_loss=0.2603, pruned_loss=0.03539, over 7276.00 frames.], tot_loss[loss=0.1549, simple_loss=0.2458, pruned_loss=0.032, over 1106740.48 frames.], batch size: 21, lr: 3.87e-04 +2022-05-27 23:07:35,763 INFO [train.py:823] (3/4) Epoch 44, batch 350, loss[loss=0.1347, simple_loss=0.2135, pruned_loss=0.0279, over 7013.00 frames.], tot_loss[loss=0.1548, simple_loss=0.2453, pruned_loss=0.03217, over 1169330.22 frames.], batch size: 16, lr: 3.87e-04 +2022-05-27 23:08:14,792 INFO [train.py:823] (3/4) Epoch 44, batch 400, loss[loss=0.1877, simple_loss=0.2765, pruned_loss=0.04947, over 4786.00 frames.], tot_loss[loss=0.1551, simple_loss=0.2452, pruned_loss=0.0325, over 1221222.37 frames.], batch size: 46, lr: 3.87e-04 +2022-05-27 23:08:53,948 INFO [train.py:823] (3/4) Epoch 44, batch 450, loss[loss=0.1609, simple_loss=0.2561, pruned_loss=0.0329, over 7239.00 frames.], tot_loss[loss=0.1558, simple_loss=0.2462, pruned_loss=0.03274, over 1264738.87 frames.], batch size: 25, lr: 3.87e-04 +2022-05-27 23:09:33,481 INFO [train.py:823] (3/4) Epoch 44, batch 500, loss[loss=0.1789, simple_loss=0.2576, pruned_loss=0.05012, over 7154.00 frames.], tot_loss[loss=0.1551, simple_loss=0.2452, pruned_loss=0.03248, over 1302144.06 frames.], batch size: 17, lr: 3.86e-04 +2022-05-27 23:10:12,719 INFO [train.py:823] (3/4) Epoch 44, batch 550, loss[loss=0.1668, simple_loss=0.2638, pruned_loss=0.0349, over 7226.00 frames.], tot_loss[loss=0.1552, simple_loss=0.2456, pruned_loss=0.03242, over 1329951.49 frames.], batch size: 24, lr: 3.86e-04 +2022-05-27 
23:10:52,030 INFO [train.py:823] (3/4) Epoch 44, batch 600, loss[loss=0.1603, simple_loss=0.2503, pruned_loss=0.03518, over 7395.00 frames.], tot_loss[loss=0.1553, simple_loss=0.2457, pruned_loss=0.03243, over 1352432.68 frames.], batch size: 19, lr: 3.86e-04 +2022-05-27 23:11:30,708 INFO [train.py:823] (3/4) Epoch 44, batch 650, loss[loss=0.1586, simple_loss=0.2568, pruned_loss=0.03015, over 7433.00 frames.], tot_loss[loss=0.1549, simple_loss=0.2452, pruned_loss=0.03229, over 1367352.28 frames.], batch size: 22, lr: 3.86e-04 +2022-05-27 23:12:09,932 INFO [train.py:823] (3/4) Epoch 44, batch 700, loss[loss=0.1693, simple_loss=0.2574, pruned_loss=0.04065, over 7152.00 frames.], tot_loss[loss=0.1547, simple_loss=0.2447, pruned_loss=0.03236, over 1378616.39 frames.], batch size: 23, lr: 3.85e-04 +2022-05-27 23:12:48,586 INFO [train.py:823] (3/4) Epoch 44, batch 750, loss[loss=0.1533, simple_loss=0.2382, pruned_loss=0.03416, over 7168.00 frames.], tot_loss[loss=0.1553, simple_loss=0.2456, pruned_loss=0.03254, over 1390333.60 frames.], batch size: 17, lr: 3.85e-04 +2022-05-27 23:13:27,420 INFO [train.py:823] (3/4) Epoch 44, batch 800, loss[loss=0.1755, simple_loss=0.2742, pruned_loss=0.03836, over 7204.00 frames.], tot_loss[loss=0.1558, simple_loss=0.2462, pruned_loss=0.03266, over 1396542.93 frames.], batch size: 25, lr: 3.85e-04 +2022-05-27 23:14:06,948 INFO [train.py:823] (3/4) Epoch 44, batch 850, loss[loss=0.1416, simple_loss=0.2142, pruned_loss=0.03446, over 6804.00 frames.], tot_loss[loss=0.1556, simple_loss=0.246, pruned_loss=0.03257, over 1401785.91 frames.], batch size: 15, lr: 3.85e-04 +2022-05-27 23:14:46,130 INFO [train.py:823] (3/4) Epoch 44, batch 900, loss[loss=0.1459, simple_loss=0.2266, pruned_loss=0.03263, over 7287.00 frames.], tot_loss[loss=0.1554, simple_loss=0.2459, pruned_loss=0.03245, over 1400096.62 frames.], batch size: 17, lr: 3.85e-04 +2022-05-27 23:15:24,411 INFO [train.py:823] (3/4) Epoch 44, batch 950, loss[loss=0.1612, simple_loss=0.2431, pruned_loss=0.03962, over 4986.00 frames.], tot_loss[loss=0.1552, simple_loss=0.2454, pruned_loss=0.03248, over 1376987.95 frames.], batch size: 46, lr: 3.84e-04 +2022-05-27 23:15:37,785 INFO [train.py:823] (3/4) Epoch 45, batch 0, loss[loss=0.1477, simple_loss=0.2415, pruned_loss=0.02695, over 7286.00 frames.], tot_loss[loss=0.1477, simple_loss=0.2415, pruned_loss=0.02695, over 7286.00 frames.], batch size: 20, lr: 3.80e-04 +2022-05-27 23:16:17,175 INFO [train.py:823] (3/4) Epoch 45, batch 50, loss[loss=0.1549, simple_loss=0.2518, pruned_loss=0.02898, over 7286.00 frames.], tot_loss[loss=0.1562, simple_loss=0.2452, pruned_loss=0.03362, over 324187.50 frames.], batch size: 21, lr: 3.80e-04 +2022-05-27 23:16:56,325 INFO [train.py:823] (3/4) Epoch 45, batch 100, loss[loss=0.1863, simple_loss=0.2858, pruned_loss=0.04338, over 7374.00 frames.], tot_loss[loss=0.1578, simple_loss=0.2473, pruned_loss=0.03415, over 566408.00 frames.], batch size: 21, lr: 3.80e-04 +2022-05-27 23:17:35,600 INFO [train.py:823] (3/4) Epoch 45, batch 150, loss[loss=0.1175, simple_loss=0.2023, pruned_loss=0.01637, over 7228.00 frames.], tot_loss[loss=0.1559, simple_loss=0.2455, pruned_loss=0.03312, over 752089.90 frames.], batch size: 16, lr: 3.79e-04 +2022-05-27 23:18:14,594 INFO [train.py:823] (3/4) Epoch 45, batch 200, loss[loss=0.1569, simple_loss=0.2375, pruned_loss=0.03819, over 5396.00 frames.], tot_loss[loss=0.1561, simple_loss=0.2454, pruned_loss=0.0334, over 897543.60 frames.], batch size: 48, lr: 3.79e-04 +2022-05-27 23:18:53,817 INFO 
[train.py:823] (3/4) Epoch 45, batch 250, loss[loss=0.1468, simple_loss=0.2385, pruned_loss=0.02756, over 6416.00 frames.], tot_loss[loss=0.1558, simple_loss=0.245, pruned_loss=0.0333, over 1010571.35 frames.], batch size: 34, lr: 3.79e-04
+2022-05-27 23:19:32,699 INFO [train.py:823] (3/4) Epoch 45, batch 300, loss[loss=0.1708, simple_loss=0.2661, pruned_loss=0.03778, over 7161.00 frames.], tot_loss[loss=0.1548, simple_loss=0.2443, pruned_loss=0.03268, over 1099452.75 frames.], batch size: 23, lr: 3.79e-04
+2022-05-27 23:20:11,972 INFO [train.py:823] (3/4) Epoch 45, batch 350, loss[loss=0.1583, simple_loss=0.2518, pruned_loss=0.03244, over 7422.00 frames.], tot_loss[loss=0.1549, simple_loss=0.2447, pruned_loss=0.03254, over 1171846.80 frames.], batch size: 22, lr: 3.78e-04
+2022-05-27 23:20:50,964 INFO [train.py:823] (3/4) Epoch 45, batch 400, loss[loss=0.19, simple_loss=0.2732, pruned_loss=0.05341, over 7372.00 frames.], tot_loss[loss=0.1546, simple_loss=0.2442, pruned_loss=0.03254, over 1228904.45 frames.], batch size: 20, lr: 3.78e-04
+2022-05-27 23:21:30,425 INFO [train.py:823] (3/4) Epoch 45, batch 450, loss[loss=0.1467, simple_loss=0.2382, pruned_loss=0.02757, over 7194.00 frames.], tot_loss[loss=0.1544, simple_loss=0.2438, pruned_loss=0.03246, over 1269036.94 frames.], batch size: 18, lr: 3.78e-04
+2022-05-27 23:22:12,162 INFO [train.py:823] (3/4) Epoch 45, batch 500, loss[loss=0.1616, simple_loss=0.2522, pruned_loss=0.03543, over 7225.00 frames.], tot_loss[loss=0.1549, simple_loss=0.2444, pruned_loss=0.03265, over 1308178.37 frames.], batch size: 24, lr: 3.78e-04
+2022-05-27 23:22:51,924 INFO [train.py:823] (3/4) Epoch 45, batch 550, loss[loss=0.143, simple_loss=0.232, pruned_loss=0.02705, over 7184.00 frames.], tot_loss[loss=0.1538, simple_loss=0.2432, pruned_loss=0.0322, over 1334512.72 frames.], batch size: 18, lr: 3.78e-04
+2022-05-27 23:23:31,348 INFO [train.py:823] (3/4) Epoch 45, batch 600, loss[loss=0.1478, simple_loss=0.243, pruned_loss=0.02627, over 6516.00 frames.], tot_loss[loss=0.1546, simple_loss=0.2441, pruned_loss=0.03258, over 1347955.52 frames.], batch size: 34, lr: 3.77e-04
+2022-05-27 23:24:10,239 INFO [train.py:823] (3/4) Epoch 45, batch 650, loss[loss=0.1724, simple_loss=0.2577, pruned_loss=0.04352, over 7146.00 frames.], tot_loss[loss=0.1545, simple_loss=0.2438, pruned_loss=0.03264, over 1363179.06 frames.], batch size: 23, lr: 3.77e-04
+2022-05-27 23:24:50,463 INFO [train.py:823] (3/4) Epoch 45, batch 700, loss[loss=0.156, simple_loss=0.2547, pruned_loss=0.0286, over 7318.00 frames.], tot_loss[loss=0.1546, simple_loss=0.2444, pruned_loss=0.03242, over 1377499.09 frames.], batch size: 22, lr: 3.77e-04
+2022-05-27 23:25:30,051 INFO [train.py:823] (3/4) Epoch 45, batch 750, loss[loss=0.1542, simple_loss=0.2573, pruned_loss=0.02557, over 6992.00 frames.], tot_loss[loss=0.155, simple_loss=0.2449, pruned_loss=0.03256, over 1386216.86 frames.], batch size: 29, lr: 3.77e-04
+2022-05-27 23:26:08,787 INFO [train.py:823] (3/4) Epoch 45, batch 800, loss[loss=0.1707, simple_loss=0.2673, pruned_loss=0.03706, over 7329.00 frames.], tot_loss[loss=0.1541, simple_loss=0.2444, pruned_loss=0.03189, over 1396078.10 frames.], batch size: 23, lr: 3.77e-04
+2022-05-27 23:26:49,341 INFO [train.py:823] (3/4) Epoch 45, batch 850, loss[loss=0.1519, simple_loss=0.2521, pruned_loss=0.02586, over 7185.00 frames.], tot_loss[loss=0.155, simple_loss=0.2457, pruned_loss=0.0321, over 1398619.35 frames.], batch size: 21, lr: 3.76e-04
+2022-05-27 23:27:28,233 INFO [train.py:823] (3/4) Epoch 45, batch 900, loss[loss=0.1437, simple_loss=0.2213, pruned_loss=0.03303, over 7420.00 frames.], tot_loss[loss=0.1547, simple_loss=0.2456, pruned_loss=0.0319, over 1401421.03 frames.], batch size: 18, lr: 3.76e-04
+2022-05-27 23:28:22,800 INFO [train.py:823] (3/4) Epoch 46, batch 0, loss[loss=0.1677, simple_loss=0.2623, pruned_loss=0.03656, over 7161.00 frames.], tot_loss[loss=0.1677, simple_loss=0.2623, pruned_loss=0.03656, over 7161.00 frames.], batch size: 22, lr: 3.72e-04
+2022-05-27 23:29:02,127 INFO [train.py:823] (3/4) Epoch 46, batch 50, loss[loss=0.1464, simple_loss=0.2386, pruned_loss=0.02713, over 7274.00 frames.], tot_loss[loss=0.1565, simple_loss=0.2459, pruned_loss=0.03354, over 315484.88 frames.], batch size: 20, lr: 3.72e-04
+2022-05-27 23:29:41,285 INFO [train.py:823] (3/4) Epoch 46, batch 100, loss[loss=0.1216, simple_loss=0.2031, pruned_loss=0.02003, over 7002.00 frames.], tot_loss[loss=0.1533, simple_loss=0.2429, pruned_loss=0.03182, over 561918.69 frames.], batch size: 16, lr: 3.71e-04
+2022-05-27 23:30:20,286 INFO [train.py:823] (3/4) Epoch 46, batch 150, loss[loss=0.1557, simple_loss=0.2505, pruned_loss=0.0305, over 7115.00 frames.], tot_loss[loss=0.1542, simple_loss=0.2438, pruned_loss=0.03231, over 754697.52 frames.], batch size: 20, lr: 3.71e-04
+2022-05-27 23:30:59,889 INFO [train.py:823] (3/4) Epoch 46, batch 200, loss[loss=0.1543, simple_loss=0.2468, pruned_loss=0.03089, over 7343.00 frames.], tot_loss[loss=0.1531, simple_loss=0.2427, pruned_loss=0.03172, over 906996.07 frames.], batch size: 23, lr: 3.71e-04
+2022-05-27 23:31:39,118 INFO [train.py:823] (3/4) Epoch 46, batch 250, loss[loss=0.1544, simple_loss=0.2393, pruned_loss=0.0347, over 7131.00 frames.], tot_loss[loss=0.1533, simple_loss=0.2431, pruned_loss=0.03177, over 1021908.17 frames.], batch size: 23, lr: 3.71e-04
+2022-05-27 23:32:18,188 INFO [train.py:823] (3/4) Epoch 46, batch 300, loss[loss=0.1666, simple_loss=0.258, pruned_loss=0.03763, over 7017.00 frames.], tot_loss[loss=0.1547, simple_loss=0.2447, pruned_loss=0.03232, over 1108234.52 frames.], batch size: 29, lr: 3.70e-04
+2022-05-27 23:32:56,773 INFO [train.py:823] (3/4) Epoch 46, batch 350, loss[loss=0.1806, simple_loss=0.2799, pruned_loss=0.04064, over 6380.00 frames.], tot_loss[loss=0.1552, simple_loss=0.2456, pruned_loss=0.03236, over 1180274.17 frames.], batch size: 34, lr: 3.70e-04
+2022-05-27 23:33:36,335 INFO [train.py:823] (3/4) Epoch 46, batch 400, loss[loss=0.1621, simple_loss=0.2561, pruned_loss=0.03398, over 7141.00 frames.], tot_loss[loss=0.156, simple_loss=0.2466, pruned_loss=0.03267, over 1236717.43 frames.], batch size: 23, lr: 3.70e-04
+2022-05-27 23:34:15,533 INFO [train.py:823] (3/4) Epoch 46, batch 450, loss[loss=0.1538, simple_loss=0.2404, pruned_loss=0.03357, over 7280.00 frames.], tot_loss[loss=0.1547, simple_loss=0.245, pruned_loss=0.03221, over 1279403.60 frames.], batch size: 20, lr: 3.70e-04
+2022-05-27 23:34:54,761 INFO [train.py:823] (3/4) Epoch 46, batch 500, loss[loss=0.1469, simple_loss=0.2301, pruned_loss=0.03184, over 6781.00 frames.], tot_loss[loss=0.155, simple_loss=0.2454, pruned_loss=0.03227, over 1304275.09 frames.], batch size: 15, lr: 3.70e-04
+2022-05-27 23:35:34,003 INFO [train.py:823] (3/4) Epoch 46, batch 550, loss[loss=0.1785, simple_loss=0.2699, pruned_loss=0.04352, over 7308.00 frames.], tot_loss[loss=0.1548, simple_loss=0.2453, pruned_loss=0.03211, over 1333790.05 frames.], batch size: 22, lr: 3.69e-04
+2022-05-27 23:36:13,169 INFO [train.py:823] (3/4) Epoch 46, batch 600, loss[loss=0.1316, simple_loss=0.2133, pruned_loss=0.02498, over 7027.00 frames.], tot_loss[loss=0.1538, simple_loss=0.2444, pruned_loss=0.03156, over 1351841.42 frames.], batch size: 17, lr: 3.69e-04
+2022-05-27 23:36:52,201 INFO [train.py:823] (3/4) Epoch 46, batch 650, loss[loss=0.1697, simple_loss=0.2612, pruned_loss=0.03912, over 7155.00 frames.], tot_loss[loss=0.1531, simple_loss=0.2438, pruned_loss=0.03115, over 1366309.10 frames.], batch size: 23, lr: 3.69e-04
+2022-05-27 23:37:31,628 INFO [train.py:823] (3/4) Epoch 46, batch 700, loss[loss=0.136, simple_loss=0.2193, pruned_loss=0.02636, over 7166.00 frames.], tot_loss[loss=0.1525, simple_loss=0.2429, pruned_loss=0.03102, over 1375147.95 frames.], batch size: 17, lr: 3.69e-04
+2022-05-27 23:38:10,515 INFO [train.py:823] (3/4) Epoch 46, batch 750, loss[loss=0.1518, simple_loss=0.25, pruned_loss=0.02685, over 6566.00 frames.], tot_loss[loss=0.1526, simple_loss=0.2429, pruned_loss=0.0311, over 1383761.04 frames.], batch size: 34, lr: 3.69e-04
+2022-05-27 23:38:49,164 INFO [train.py:823] (3/4) Epoch 46, batch 800, loss[loss=0.1472, simple_loss=0.238, pruned_loss=0.02814, over 7194.00 frames.], tot_loss[loss=0.153, simple_loss=0.2436, pruned_loss=0.03125, over 1387459.05 frames.], batch size: 20, lr: 3.68e-04
+2022-05-27 23:39:28,364 INFO [train.py:823] (3/4) Epoch 46, batch 850, loss[loss=0.1803, simple_loss=0.2735, pruned_loss=0.04359, over 7363.00 frames.], tot_loss[loss=0.1532, simple_loss=0.2438, pruned_loss=0.03132, over 1390097.31 frames.], batch size: 23, lr: 3.68e-04
+2022-05-27 23:40:07,729 INFO [train.py:823] (3/4) Epoch 46, batch 900, loss[loss=0.1411, simple_loss=0.2329, pruned_loss=0.02469, over 7089.00 frames.], tot_loss[loss=0.1537, simple_loss=0.2445, pruned_loss=0.03149, over 1397112.78 frames.], batch size: 18, lr: 3.68e-04
+2022-05-27 23:41:01,427 INFO [train.py:823] (3/4) Epoch 47, batch 0, loss[loss=0.1325, simple_loss=0.2156, pruned_loss=0.02472, over 7004.00 frames.], tot_loss[loss=0.1325, simple_loss=0.2156, pruned_loss=0.02472, over 7004.00 frames.], batch size: 16, lr: 3.64e-04
+2022-05-27 23:41:40,296 INFO [train.py:823] (3/4) Epoch 47, batch 50, loss[loss=0.1611, simple_loss=0.2451, pruned_loss=0.03856, over 7292.00 frames.], tot_loss[loss=0.151, simple_loss=0.2402, pruned_loss=0.03087, over 322164.17 frames.], batch size: 17, lr: 3.64e-04
+2022-05-27 23:42:19,511 INFO [train.py:823] (3/4) Epoch 47, batch 100, loss[loss=0.1226, simple_loss=0.2085, pruned_loss=0.01831, over 7307.00 frames.], tot_loss[loss=0.1511, simple_loss=0.24, pruned_loss=0.03115, over 565706.97 frames.], batch size: 18, lr: 3.63e-04
+2022-05-27 23:42:58,970 INFO [train.py:823] (3/4) Epoch 47, batch 150, loss[loss=0.1534, simple_loss=0.253, pruned_loss=0.02689, over 7298.00 frames.], tot_loss[loss=0.1524, simple_loss=0.2416, pruned_loss=0.03155, over 757100.12 frames.], batch size: 22, lr: 3.63e-04
+2022-05-27 23:43:37,723 INFO [train.py:823] (3/4) Epoch 47, batch 200, loss[loss=0.1748, simple_loss=0.2551, pruned_loss=0.04724, over 7089.00 frames.], tot_loss[loss=0.1532, simple_loss=0.2428, pruned_loss=0.03181, over 901192.27 frames.], batch size: 18, lr: 3.63e-04
+2022-05-27 23:44:17,093 INFO [train.py:823] (3/4) Epoch 47, batch 250, loss[loss=0.1351, simple_loss=0.231, pruned_loss=0.0196, over 7389.00 frames.], tot_loss[loss=0.1533, simple_loss=0.243, pruned_loss=0.03177, over 1022368.63 frames.], batch size: 19, lr: 3.63e-04
+2022-05-27 23:44:56,270 INFO [train.py:823] (3/4) Epoch 47, batch 300, loss[loss=0.1385, simple_loss=0.2176, pruned_loss=0.02969, over 7190.00 frames.], tot_loss[loss=0.1533, simple_loss=0.2434, pruned_loss=0.03161, over 1111519.07 frames.], batch size: 18, lr: 3.63e-04
+2022-05-27 23:45:37,080 INFO [train.py:823] (3/4) Epoch 47, batch 350, loss[loss=0.1526, simple_loss=0.2457, pruned_loss=0.02978, over 7284.00 frames.], tot_loss[loss=0.1529, simple_loss=0.2431, pruned_loss=0.03136, over 1179159.41 frames.], batch size: 20, lr: 3.62e-04
+2022-05-27 23:46:17,252 INFO [train.py:823] (3/4) Epoch 47, batch 400, loss[loss=0.1684, simple_loss=0.2566, pruned_loss=0.04011, over 7278.00 frames.], tot_loss[loss=0.1542, simple_loss=0.2447, pruned_loss=0.03185, over 1233340.59 frames.], batch size: 20, lr: 3.62e-04
+2022-05-27 23:46:56,224 INFO [train.py:823] (3/4) Epoch 47, batch 450, loss[loss=0.1354, simple_loss=0.2232, pruned_loss=0.02376, over 7153.00 frames.], tot_loss[loss=0.1544, simple_loss=0.2448, pruned_loss=0.03201, over 1273917.14 frames.], batch size: 17, lr: 3.62e-04
+2022-05-27 23:47:36,422 INFO [train.py:823] (3/4) Epoch 47, batch 500, loss[loss=0.1389, simple_loss=0.2309, pruned_loss=0.02342, over 7089.00 frames.], tot_loss[loss=0.1547, simple_loss=0.2452, pruned_loss=0.03215, over 1302754.52 frames.], batch size: 19, lr: 3.62e-04
+2022-05-27 23:48:15,827 INFO [train.py:823] (3/4) Epoch 47, batch 550, loss[loss=0.1373, simple_loss=0.2216, pruned_loss=0.02646, over 7389.00 frames.], tot_loss[loss=0.1541, simple_loss=0.2444, pruned_loss=0.0319, over 1327552.71 frames.], batch size: 19, lr: 3.62e-04
+2022-05-27 23:48:54,669 INFO [train.py:823] (3/4) Epoch 47, batch 600, loss[loss=0.1638, simple_loss=0.2601, pruned_loss=0.03379, over 7025.00 frames.], tot_loss[loss=0.1538, simple_loss=0.2441, pruned_loss=0.03178, over 1347041.82 frames.], batch size: 26, lr: 3.61e-04
+2022-05-27 23:49:35,024 INFO [train.py:823] (3/4) Epoch 47, batch 650, loss[loss=0.119, simple_loss=0.203, pruned_loss=0.01751, over 7302.00 frames.], tot_loss[loss=0.1548, simple_loss=0.2454, pruned_loss=0.03207, over 1364952.86 frames.], batch size: 17, lr: 3.61e-04
+2022-05-27 23:50:14,071 INFO [train.py:823] (3/4) Epoch 47, batch 700, loss[loss=0.2158, simple_loss=0.3174, pruned_loss=0.05716, over 7325.00 frames.], tot_loss[loss=0.1543, simple_loss=0.2447, pruned_loss=0.03191, over 1372123.80 frames.], batch size: 23, lr: 3.61e-04
+2022-05-27 23:50:53,640 INFO [train.py:823] (3/4) Epoch 47, batch 750, loss[loss=0.1301, simple_loss=0.2203, pruned_loss=0.01992, over 7295.00 frames.], tot_loss[loss=0.1543, simple_loss=0.2446, pruned_loss=0.03199, over 1384285.90 frames.], batch size: 19, lr: 3.61e-04
+2022-05-27 23:51:32,306 INFO [train.py:823] (3/4) Epoch 47, batch 800, loss[loss=0.1786, simple_loss=0.2671, pruned_loss=0.04505, over 6986.00 frames.], tot_loss[loss=0.1535, simple_loss=0.2438, pruned_loss=0.03157, over 1391345.36 frames.], batch size: 26, lr: 3.61e-04
+2022-05-27 23:52:11,469 INFO [train.py:823] (3/4) Epoch 47, batch 850, loss[loss=0.134, simple_loss=0.2282, pruned_loss=0.0199, over 7199.00 frames.], tot_loss[loss=0.1536, simple_loss=0.2439, pruned_loss=0.03166, over 1393361.89 frames.], batch size: 18, lr: 3.60e-04
+2022-05-27 23:52:50,592 INFO [train.py:823] (3/4) Epoch 47, batch 900, loss[loss=0.1419, simple_loss=0.2375, pruned_loss=0.02321, over 7305.00 frames.], tot_loss[loss=0.1537, simple_loss=0.2442, pruned_loss=0.0316, over 1398045.59 frames.], batch size: 22, lr: 3.60e-04
+2022-05-27 23:53:43,419 INFO [train.py:823] (3/4) Epoch 48, batch 0, loss[loss=0.1576, simple_loss=0.2467, pruned_loss=0.03424, over 7181.00 frames.], tot_loss[loss=0.1576, simple_loss=0.2467, pruned_loss=0.03424, over 7181.00 frames.], batch size: 21, lr: 3.56e-04
+2022-05-27 23:54:22,657 INFO [train.py:823] (3/4) Epoch 48, batch 50, loss[loss=0.1533, simple_loss=0.2366, pruned_loss=0.03499, over 7167.00 frames.], tot_loss[loss=0.1537, simple_loss=0.2435, pruned_loss=0.03192, over 320604.17 frames.], batch size: 17, lr: 3.56e-04
+2022-05-27 23:55:02,009 INFO [train.py:823] (3/4) Epoch 48, batch 100, loss[loss=0.1635, simple_loss=0.2551, pruned_loss=0.03598, over 7183.00 frames.], tot_loss[loss=0.1528, simple_loss=0.2427, pruned_loss=0.03149, over 565228.52 frames.], batch size: 25, lr: 3.56e-04
+2022-05-27 23:55:41,072 INFO [train.py:823] (3/4) Epoch 48, batch 150, loss[loss=0.1401, simple_loss=0.2171, pruned_loss=0.03153, over 7305.00 frames.], tot_loss[loss=0.1524, simple_loss=0.2432, pruned_loss=0.03078, over 759984.17 frames.], batch size: 17, lr: 3.56e-04
+2022-05-27 23:56:20,211 INFO [train.py:823] (3/4) Epoch 48, batch 200, loss[loss=0.1782, simple_loss=0.2723, pruned_loss=0.04201, over 7303.00 frames.], tot_loss[loss=0.153, simple_loss=0.243, pruned_loss=0.03152, over 908407.65 frames.], batch size: 22, lr: 3.55e-04
+2022-05-27 23:56:59,317 INFO [train.py:823] (3/4) Epoch 48, batch 250, loss[loss=0.1785, simple_loss=0.2635, pruned_loss=0.04672, over 7192.00 frames.], tot_loss[loss=0.1539, simple_loss=0.2432, pruned_loss=0.03229, over 1024406.57 frames.], batch size: 19, lr: 3.55e-04
+2022-05-27 23:57:38,941 INFO [train.py:823] (3/4) Epoch 48, batch 300, loss[loss=0.1976, simple_loss=0.2806, pruned_loss=0.05724, over 6976.00 frames.], tot_loss[loss=0.1535, simple_loss=0.2427, pruned_loss=0.03219, over 1116213.24 frames.], batch size: 26, lr: 3.55e-04
+2022-05-27 23:58:17,915 INFO [train.py:823] (3/4) Epoch 48, batch 350, loss[loss=0.1594, simple_loss=0.2563, pruned_loss=0.03119, over 4909.00 frames.], tot_loss[loss=0.1537, simple_loss=0.2432, pruned_loss=0.03206, over 1182418.43 frames.], batch size: 47, lr: 3.55e-04
+2022-05-27 23:58:57,411 INFO [train.py:823] (3/4) Epoch 48, batch 400, loss[loss=0.1635, simple_loss=0.2581, pruned_loss=0.03445, over 6473.00 frames.], tot_loss[loss=0.1529, simple_loss=0.2424, pruned_loss=0.03166, over 1237673.15 frames.], batch size: 34, lr: 3.55e-04
+2022-05-27 23:59:36,567 INFO [train.py:823] (3/4) Epoch 48, batch 450, loss[loss=0.1561, simple_loss=0.2408, pruned_loss=0.03569, over 7297.00 frames.], tot_loss[loss=0.1535, simple_loss=0.2434, pruned_loss=0.03176, over 1279941.44 frames.], batch size: 17, lr: 3.54e-04
+2022-05-28 00:00:15,817 INFO [train.py:823] (3/4) Epoch 48, batch 500, loss[loss=0.1492, simple_loss=0.2528, pruned_loss=0.02278, over 7201.00 frames.], tot_loss[loss=0.1541, simple_loss=0.2444, pruned_loss=0.03187, over 1310454.55 frames.], batch size: 20, lr: 3.54e-04
+2022-05-28 00:00:54,518 INFO [train.py:823] (3/4) Epoch 48, batch 550, loss[loss=0.1533, simple_loss=0.2554, pruned_loss=0.02559, over 7421.00 frames.], tot_loss[loss=0.1538, simple_loss=0.2442, pruned_loss=0.03175, over 1330320.80 frames.], batch size: 22, lr: 3.54e-04
+2022-05-28 00:01:33,268 INFO [train.py:823] (3/4) Epoch 48, batch 600, loss[loss=0.1455, simple_loss=0.2426, pruned_loss=0.02419, over 7281.00 frames.], tot_loss[loss=0.1543, simple_loss=0.245, pruned_loss=0.03181, over 1350349.87 frames.], batch size: 20, lr: 3.54e-04
+2022-05-28 00:02:11,686 INFO [train.py:823] (3/4) Epoch 48, batch 650, loss[loss=0.1633, simple_loss=0.2556, pruned_loss=0.03553, over 7379.00 frames.], tot_loss[loss=0.1541, simple_loss=0.2449, pruned_loss=0.03167, over 1364111.63 frames.], batch size: 21, lr: 3.54e-04
+2022-05-28 00:02:51,237 INFO [train.py:823] (3/4) Epoch 48, batch 700, loss[loss=0.1815, simple_loss=0.2683, pruned_loss=0.04737, over 7170.00 frames.], tot_loss[loss=0.1547, simple_loss=0.2456, pruned_loss=0.03183, over 1371771.46 frames.], batch size: 22, lr: 3.53e-04
+2022-05-28 00:03:30,269 INFO [train.py:823] (3/4) Epoch 48, batch 750, loss[loss=0.1599, simple_loss=0.2494, pruned_loss=0.03526, over 7104.00 frames.], tot_loss[loss=0.1539, simple_loss=0.245, pruned_loss=0.0314, over 1384600.24 frames.], batch size: 19, lr: 3.53e-04
+2022-05-28 00:04:09,561 INFO [train.py:823] (3/4) Epoch 48, batch 800, loss[loss=0.146, simple_loss=0.2436, pruned_loss=0.02427, over 7341.00 frames.], tot_loss[loss=0.1538, simple_loss=0.2445, pruned_loss=0.03151, over 1391852.25 frames.], batch size: 23, lr: 3.53e-04
+2022-05-28 00:04:48,533 INFO [train.py:823] (3/4) Epoch 48, batch 850, loss[loss=0.1483, simple_loss=0.2431, pruned_loss=0.02675, over 7299.00 frames.], tot_loss[loss=0.1533, simple_loss=0.244, pruned_loss=0.0313, over 1392627.79 frames.], batch size: 17, lr: 3.53e-04
+2022-05-28 00:05:27,434 INFO [train.py:823] (3/4) Epoch 48, batch 900, loss[loss=0.1256, simple_loss=0.2169, pruned_loss=0.01721, over 7294.00 frames.], tot_loss[loss=0.1527, simple_loss=0.2436, pruned_loss=0.0309, over 1395086.31 frames.], batch size: 19, lr: 3.53e-04
+2022-05-28 00:06:18,162 INFO [train.py:823] (3/4) Epoch 49, batch 0, loss[loss=0.1554, simple_loss=0.2524, pruned_loss=0.02919, over 7376.00 frames.], tot_loss[loss=0.1554, simple_loss=0.2524, pruned_loss=0.02919, over 7376.00 frames.], batch size: 20, lr: 3.49e-04
+2022-05-28 00:06:57,261 INFO [train.py:823] (3/4) Epoch 49, batch 50, loss[loss=0.1374, simple_loss=0.2425, pruned_loss=0.01613, over 7277.00 frames.], tot_loss[loss=0.1541, simple_loss=0.2453, pruned_loss=0.03145, over 318493.05 frames.], batch size: 21, lr: 3.49e-04
+2022-05-28 00:07:37,585 INFO [train.py:823] (3/4) Epoch 49, batch 100, loss[loss=0.1492, simple_loss=0.2398, pruned_loss=0.02936, over 7190.00 frames.], tot_loss[loss=0.1542, simple_loss=0.2451, pruned_loss=0.03167, over 560555.37 frames.], batch size: 18, lr: 3.48e-04
+2022-05-28 00:08:16,704 INFO [train.py:823] (3/4) Epoch 49, batch 150, loss[loss=0.1817, simple_loss=0.2629, pruned_loss=0.05028, over 4737.00 frames.], tot_loss[loss=0.153, simple_loss=0.2435, pruned_loss=0.03124, over 750857.87 frames.], batch size: 46, lr: 3.48e-04
+2022-05-28 00:08:56,119 INFO [train.py:823] (3/4) Epoch 49, batch 200, loss[loss=0.1488, simple_loss=0.2473, pruned_loss=0.02511, over 7144.00 frames.], tot_loss[loss=0.152, simple_loss=0.242, pruned_loss=0.03099, over 901890.46 frames.], batch size: 23, lr: 3.48e-04
+2022-05-28 00:09:37,996 INFO [train.py:823] (3/4) Epoch 49, batch 250, loss[loss=0.1749, simple_loss=0.2594, pruned_loss=0.04521, over 7192.00 frames.], tot_loss[loss=0.1527, simple_loss=0.2431, pruned_loss=0.03116, over 1022260.87 frames.], batch size: 20, lr: 3.48e-04
+2022-05-28 00:10:16,997 INFO [train.py:823] (3/4) Epoch 49, batch 300, loss[loss=0.1402, simple_loss=0.2258, pruned_loss=0.02735, over 7296.00 frames.], tot_loss[loss=0.1516, simple_loss=0.2417, pruned_loss=0.03077, over 1113947.56 frames.], batch size: 18, lr: 3.48e-04
+2022-05-28 00:10:56,123 INFO [train.py:823] (3/4) Epoch 49, batch 350, loss[loss=0.1673, simple_loss=0.2648, pruned_loss=0.03492, over 7221.00 frames.], tot_loss[loss=0.1514, simple_loss=0.2417, pruned_loss=0.03058, over 1177552.32 frames.], batch size: 25, lr: 3.48e-04
+2022-05-28 00:11:35,260 INFO [train.py:823] (3/4) Epoch 49, batch 400, loss[loss=0.1414, simple_loss=0.221, pruned_loss=0.03088, over 7003.00 frames.], tot_loss[loss=0.1522, simple_loss=0.2422, pruned_loss=0.03108, over 1227909.30 frames.], batch size: 16, lr: 3.47e-04
+2022-05-28 00:12:14,628 INFO [train.py:823] (3/4) Epoch 49, batch 450, loss[loss=0.1821, simple_loss=0.2737, pruned_loss=0.04524, over 7254.00 frames.], tot_loss[loss=0.1519, simple_loss=0.2418, pruned_loss=0.03101, over 1272409.68 frames.], batch size: 24, lr: 3.47e-04
+2022-05-28 00:12:54,463 INFO [train.py:823] (3/4) Epoch 49, batch 500, loss[loss=0.1622, simple_loss=0.2614, pruned_loss=0.03148, over 6452.00 frames.], tot_loss[loss=0.1524, simple_loss=0.2425, pruned_loss=0.03116, over 1303818.04 frames.], batch size: 34, lr: 3.47e-04
+2022-05-28 00:13:33,856 INFO [train.py:823] (3/4) Epoch 49, batch 550, loss[loss=0.1622, simple_loss=0.2363, pruned_loss=0.044, over 7296.00 frames.], tot_loss[loss=0.1513, simple_loss=0.2415, pruned_loss=0.03052, over 1330746.28 frames.], batch size: 17, lr: 3.47e-04
+2022-05-28 00:14:12,744 INFO [train.py:823] (3/4) Epoch 49, batch 600, loss[loss=0.153, simple_loss=0.2423, pruned_loss=0.03182, over 7211.00 frames.], tot_loss[loss=0.1523, simple_loss=0.2424, pruned_loss=0.03112, over 1350247.35 frames.], batch size: 24, lr: 3.47e-04
+2022-05-28 00:14:52,415 INFO [train.py:823] (3/4) Epoch 49, batch 650, loss[loss=0.1338, simple_loss=0.2213, pruned_loss=0.02318, over 7154.00 frames.], tot_loss[loss=0.1523, simple_loss=0.2425, pruned_loss=0.03102, over 1366297.97 frames.], batch size: 17, lr: 3.46e-04
+2022-05-28 00:15:31,446 INFO [train.py:823] (3/4) Epoch 49, batch 700, loss[loss=0.1726, simple_loss=0.271, pruned_loss=0.03706, over 7419.00 frames.], tot_loss[loss=0.1534, simple_loss=0.2439, pruned_loss=0.0315, over 1370408.54 frames.], batch size: 22, lr: 3.46e-04
+2022-05-28 00:16:11,000 INFO [train.py:823] (3/4) Epoch 49, batch 750, loss[loss=0.1437, simple_loss=0.235, pruned_loss=0.02617, over 7298.00 frames.], tot_loss[loss=0.1531, simple_loss=0.2438, pruned_loss=0.03116, over 1381062.66 frames.], batch size: 19, lr: 3.46e-04
+2022-05-28 00:16:49,940 INFO [train.py:823] (3/4) Epoch 49, batch 800, loss[loss=0.1133, simple_loss=0.198, pruned_loss=0.01434, over 7168.00 frames.], tot_loss[loss=0.1528, simple_loss=0.2435, pruned_loss=0.03104, over 1384913.20 frames.], batch size: 17, lr: 3.46e-04
+2022-05-28 00:17:29,766 INFO [train.py:823] (3/4) Epoch 49, batch 850, loss[loss=0.1373, simple_loss=0.2279, pruned_loss=0.0234, over 7099.00 frames.], tot_loss[loss=0.1535, simple_loss=0.2441, pruned_loss=0.03139, over 1391293.57 frames.], batch size: 18, lr: 3.46e-04
+2022-05-28 00:18:08,832 INFO [train.py:823] (3/4) Epoch 49, batch 900, loss[loss=0.1672, simple_loss=0.2517, pruned_loss=0.04141, over 6488.00 frames.], tot_loss[loss=0.1538, simple_loss=0.2444, pruned_loss=0.03158, over 1394025.69 frames.], batch size: 34, lr: 3.45e-04
+2022-05-28 00:19:00,852 INFO [train.py:823] (3/4) Epoch 50, batch 0, loss[loss=0.1583, simple_loss=0.2463, pruned_loss=0.0351, over 6936.00 frames.], tot_loss[loss=0.1583, simple_loss=0.2463, pruned_loss=0.0351, over 6936.00 frames.], batch size: 29, lr: 3.42e-04
+2022-05-28 00:19:40,046 INFO [train.py:823] (3/4) Epoch 50, batch 50, loss[loss=0.1427, simple_loss=0.2397, pruned_loss=0.02286, over 7276.00 frames.], tot_loss[loss=0.1478, simple_loss=0.2369, pruned_loss=0.02934, over 322107.71 frames.], batch size: 20, lr: 3.42e-04
+2022-05-28 00:20:19,188 INFO [train.py:823] (3/4) Epoch 50, batch 100, loss[loss=0.1507, simple_loss=0.2425, pruned_loss=0.0295, over 7174.00 frames.], tot_loss[loss=0.1497, simple_loss=0.2395, pruned_loss=0.02994, over 564213.30 frames.], batch size: 23, lr: 3.41e-04
+2022-05-28 00:20:58,265 INFO [train.py:823] (3/4) Epoch 50, batch 150, loss[loss=0.1601, simple_loss=0.2476, pruned_loss=0.03627, over 7376.00 frames.], tot_loss[loss=0.1536, simple_loss=0.2435, pruned_loss=0.03184, over 753055.60 frames.], batch size: 21, lr: 3.41e-04
+2022-05-28 00:21:37,490 INFO [train.py:823] (3/4) Epoch 50, batch 200, loss[loss=0.1406, simple_loss=0.2335, pruned_loss=0.0238, over 7096.00 frames.], tot_loss[loss=0.153, simple_loss=0.2437, pruned_loss=0.03118, over 902498.41 frames.], batch size: 18, lr: 3.41e-04
+2022-05-28 00:22:16,582 INFO [train.py:823] (3/4) Epoch 50, batch 250, loss[loss=0.1699, simple_loss=0.2612, pruned_loss=0.03928, over 7186.00 frames.], tot_loss[loss=0.1524, simple_loss=0.2428, pruned_loss=0.03102, over 1020201.78 frames.], batch size: 22, lr: 3.41e-04
+2022-05-28 00:22:55,531 INFO [train.py:823] (3/4) Epoch 50, batch 300, loss[loss=0.1665, simple_loss=0.2594, pruned_loss=0.03686, over 7200.00 frames.], tot_loss[loss=0.1526, simple_loss=0.2432, pruned_loss=0.03097, over 1110516.11 frames.], batch size: 20, lr: 3.41e-04
+2022-05-28 00:23:34,711 INFO [train.py:823] (3/4) Epoch 50, batch 350, loss[loss=0.1498, simple_loss=0.2354, pruned_loss=0.0321, over 7428.00 frames.], tot_loss[loss=0.1526, simple_loss=0.2429, pruned_loss=0.03116, over 1178555.04 frames.], batch size: 22, lr: 3.41e-04
+2022-05-28 00:24:13,746 INFO [train.py:823] (3/4) Epoch 50, batch 400, loss[loss=0.1553, simple_loss=0.2438, pruned_loss=0.03344, over 7039.00 frames.], tot_loss[loss=0.1517, simple_loss=0.2416, pruned_loss=0.03087, over 1232927.42 frames.], batch size: 26, lr: 3.40e-04
+2022-05-28 00:24:52,214 INFO [train.py:823] (3/4) Epoch 50, batch 450, loss[loss=0.1543, simple_loss=0.247, pruned_loss=0.03085, over 6612.00 frames.], tot_loss[loss=0.1524, simple_loss=0.2426, pruned_loss=0.03108, over 1274024.13 frames.], batch size: 34, lr: 3.40e-04
+2022-05-28 00:25:31,621 INFO [train.py:823] (3/4) Epoch 50, batch 500, loss[loss=0.1445, simple_loss=0.2363, pruned_loss=0.02631, over 7306.00 frames.], tot_loss[loss=0.1522, simple_loss=0.2427, pruned_loss=0.03082, over 1306809.58 frames.], batch size: 19, lr: 3.40e-04
+2022-05-28 00:26:10,825 INFO [train.py:823] (3/4) Epoch 50, batch 550, loss[loss=0.1525, simple_loss=0.2426, pruned_loss=0.0312, over 7202.00 frames.], tot_loss[loss=0.1525, simple_loss=0.2431, pruned_loss=0.03095, over 1334806.25 frames.], batch size: 24, lr: 3.40e-04
+2022-05-28 00:26:49,648 INFO [train.py:823] (3/4) Epoch 50, batch 600, loss[loss=0.1389, simple_loss=0.2249, pruned_loss=0.02647, over 7010.00 frames.], tot_loss[loss=0.1527, simple_loss=0.2432, pruned_loss=0.03111, over 1353035.86 frames.], batch size: 16, lr: 3.40e-04
+2022-05-28 00:27:28,450 INFO [train.py:823] (3/4) Epoch 50, batch 650, loss[loss=0.1276, simple_loss=0.2074, pruned_loss=0.02389, over 7008.00 frames.], tot_loss[loss=0.1528, simple_loss=0.2437, pruned_loss=0.03095, over 1363871.23 frames.], batch size: 16, lr: 3.39e-04
+2022-05-28 00:28:07,699 INFO [train.py:823] (3/4) Epoch 50, batch 700, loss[loss=0.1458, simple_loss=0.2361, pruned_loss=0.02769, over 7001.00 frames.], tot_loss[loss=0.1527, simple_loss=0.2433, pruned_loss=0.03106, over 1376281.23 frames.], batch size: 16, lr: 3.39e-04
+2022-05-28 00:28:46,802 INFO [train.py:823] (3/4) Epoch 50, batch 750, loss[loss=0.159, simple_loss=0.2564, pruned_loss=0.03076, over 7299.00 frames.], tot_loss[loss=0.153, simple_loss=0.2436, pruned_loss=0.03116, over 1382934.02 frames.], batch size: 22, lr: 3.39e-04
+2022-05-28 00:29:26,223 INFO [train.py:823] (3/4) Epoch 50, batch 800, loss[loss=0.1421, simple_loss=0.2396, pruned_loss=0.0223, over 7093.00 frames.], tot_loss[loss=0.1533, simple_loss=0.2436, pruned_loss=0.03143, over 1390690.65 frames.], batch size: 19, lr: 3.39e-04
+2022-05-28 00:30:05,682 INFO [train.py:823] (3/4) Epoch 50, batch 850, loss[loss=0.1693, simple_loss=0.2559, pruned_loss=0.04131, over 4888.00 frames.], tot_loss[loss=0.1531, simple_loss=0.2437, pruned_loss=0.03123, over 1395477.55 frames.], batch size: 47, lr: 3.39e-04
+2022-05-28 00:30:45,832 INFO [train.py:823] (3/4) Epoch 50, batch 900, loss[loss=0.1487, simple_loss=0.2461, pruned_loss=0.02562, over 6423.00 frames.], tot_loss[loss=0.1531, simple_loss=0.2439, pruned_loss=0.03119, over 1397783.08 frames.], batch size: 34, lr: 3.39e-04
+2022-05-28 00:31:24,792 INFO [train.py:1038] (3/4) Done!